Example Code: TensorFlow, PyTorch, Hugging Face, Keras, and the OpenAI API

 

1. TensorFlow – Image Classification (MNIST Digits)

python
# TensorFlow: classify handwritten MNIST digits with a small dense network.
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten

# Load the dataset and scale pixel values from [0, 255] down to [0, 1].
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
train_images = train_images / 255.0
test_images = test_images / 255.0

# Feed-forward classifier: flatten each 28x28 image, one hidden ReLU
# layer, then a softmax over the 10 digit classes.
model = Sequential([
    Flatten(input_shape=(28, 28)),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax'),
])

# Labels are plain integers, hence sparse categorical cross-entropy.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
model.fit(train_images, train_labels, epochs=5)

# Report loss/accuracy on the held-out test split.
model.evaluate(test_images, test_labels)

2. PyTorch – Sentiment Analysis with LSTM

python
# PyTorch: binary sentiment classification on IMDB with an LSTM.
#
# NOTE(review): torchtext.legacy was removed in torchtext >= 0.12, so this
# example requires torchtext <= 0.11. Newer code should load
# torchtext.datasets.IMDB and batch with a custom collate_fn instead.
import torch
import torch.nn as nn
from torchtext.legacy.datasets import IMDB
from torchtext.legacy.data import Field, BucketIterator

# Tokenize reviews with spaCy and keep per-example lengths so padded
# batches can be packed; labels are single float values.
TEXT = Field(tokenize='spacy', include_lengths=True)
LABEL = Field(sequential=False, dtype=torch.float)

train_data, test_data = IMDB.splits(TEXT, LABEL)

# Cap the vocabulary to keep the embedding table small.
TEXT.build_vocab(train_data, max_size=10000)
LABEL.build_vocab(train_data)

# sort_within_batch=True orders each batch longest-first, which
# pack_padded_sequence expects by default (enforce_sorted=True).
train_iter, test_iter = BucketIterator.splits(
    (train_data, test_data), batch_size=64, sort_within_batch=True)


class LSTMModel(nn.Module):
    """Embedding -> single-layer LSTM -> linear head with sigmoid output."""

    def __init__(self, vocab_size, embedding_dim, hidden_dim):
        super(LSTMModel, self).__init__()
        self.embedding = nn.Embedding(vocab_size, embedding_dim)
        self.lstm = nn.LSTM(embedding_dim, hidden_dim)
        self.fc = nn.Linear(hidden_dim, 1)

    def forward(self, text, text_lengths):
        # text: (seq_len, batch) token ids; text_lengths: (batch,) lengths.
        embedded = self.embedding(text)
        # FIX: pack_padded_sequence requires the lengths tensor on the CPU;
        # passing text_lengths directly fails on GPU with recent PyTorch.
        packed = nn.utils.rnn.pack_padded_sequence(
            embedded, text_lengths.cpu())
        output, (hidden, _) = self.lstm(packed)
        # hidden[-1] is the final hidden state of the last (only) layer,
        # shape (batch, hidden_dim); squash the score into (0, 1).
        return torch.sigmoid(self.fc(hidden[-1]))

# Initialize and train model (training loop not shown for brevity)

3. Hugging Face Transformers – Text Summarization

python
# Hugging Face Transformers: abstractive summarization via the pipeline API.
from transformers import pipeline

# The default "summarization" pipeline downloads a pretrained
# model the first time it is used.
summarizer = pipeline("summarization")

# Input text to condense.
text = """ The Eiffel Tower is one of the most iconic structures in the world, located in Paris, France. It was completed in 1889 and stands at 324 meters tall. Millions of tourists visit it every year to enjoy the view and its architectural beauty. """

# do_sample=False makes the output deterministic (greedy/beam decoding);
# min/max_length bound the summary size in tokens.
result = summarizer(text, max_length=50, min_length=20, do_sample=False)
print(result[0]['summary_text'])

4. Keras – Digit Recognition with MNIST

python
# Keras (standalone): digit recognition on MNIST using one-hot labels.
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.datasets import mnist
from keras.utils import to_categorical

# Load the data, scale pixels into [0, 1], and one-hot encode the
# 10 digit classes.
(images_train, labels_train), (images_test, labels_test) = mnist.load_data()
images_train = images_train / 255.0
images_test = images_test / 255.0
labels_train = to_categorical(labels_train)
labels_test = to_categorical(labels_test)

# Same architecture as the TensorFlow example: flatten, one hidden
# ReLU layer, softmax output.
model = Sequential([
    Flatten(input_shape=(28, 28)),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax'),
])

# One-hot labels, hence (dense) categorical cross-entropy.
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(images_train, labels_train, epochs=5)

# Evaluate on the held-out test split.
model.evaluate(images_test, labels_test)

5. OpenAI GPT API – Chatbot Response (Python & API Key Required)

python
# OpenAI API: single-turn chatbot request.
#
# FIX: the module-level openai.api_key + openai.ChatCompletion.create
# interface was removed in openai >= 1.0; use the client-based API and
# attribute access on the response object instead.
import openai

# 🔐 Replace with your actual key — or better, set the OPENAI_API_KEY
# environment variable and construct OpenAI() with no argument.
client = openai.OpenAI(api_key="your-api-key")

response = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is the capital of France?"},
    ],
)

# v1+ responses expose attributes, not dict keys.
print(response.choices[0].message.content)

🔐 Note: Replace "your-api-key" with your actual OpenAI API key from https://platform.openai.com.


Let me know if you want any of these converted into notebooks or expanded tutorials!

Comments

Popular posts from this blog

Free image-to-video generator tools

ml