import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs:
https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Default system message that frames the assistant's persona and priorities
default_system_message = """
You are Veshon, the official AI assistant for Veshup. Veshup is a fashion-tech platform aimed at solving challenges in the fashion industry and e-commerce using futuristic technologies. Your role is to provide expert advice about Veshup's services, mission, and goals. Always focus on:
- Enhancing the social experience for fashion enthusiasts.
- Promoting fashion awareness and sustainable practices.
- Assisting with outfit recreation and try-ons.
- Helping brands showcase their products effectively.
- Aligning your responses with Veshup's vision to become a unicorn company within a year.
"""

# Knowledge base of canned answers, keyed by topic
knowledge_base = {
    "founders": "Kishan Karyappa K and Jayaprakash P",
    "mission": "To integrate futuristic technologies to solve challenges in the fashion industry and fashion e-commerce.",
    "vision": "To become a unicorn company within a year by revolutionizing the fashion tech space.",
    "objectives": [
        "Promote sustainable and ethical fashion practices.",
        "Provide AI-powered tools for virtual try-ons and outfit recreation.",
        "Enhance the online fashion community experience.",
        "Help brands showcase products innovatively.",
        "Simplify daily fashion decisions for users.",
    ],
    "current_focus": [
        "Building the Veshup website.",
        "Securing domains (.in and .com).",
        "Implementing frugal solutions to overcome technical challenges.",
        "Enhancing user engagement with innovative tools.",
    ],
    "fashion_tips": [
        "Pair bold prints with neutral colors to balance your outfit.",
        "Monochromatic outfits can make you look taller and slimmer.",
        "Layering adds depth and interest to simple outfits.",
        "Invest in timeless pieces like a well-fitted blazer or classic jeans.",
        "Accessorize with a statement piece to elevate your look.",
        "Dress according to the occasion and weather for maximum comfort.",
    ],
    "trends_2024": [
        "Sustainable and recycled materials are becoming mainstream.",
        "Techwear and futuristic designs are on the rise.",
        "Bold color blocking is a key trend this season.",
        "Vintage and retro-inspired looks are making a strong comeback.",
        "Customization and personalization in fashion are gaining popularity.",
    ],
    "fashion_facts": [
        "The global fashion industry is worth over $2.5 trillion.",
        "Fast fashion contributes significantly to environmental pollution.",
        "The average person wears only 20% of their wardrobe regularly.",
        "Colors like red and black have psychological impacts on perception.",
        "Synthetic fabrics like polyester take hundreds of years to decompose.",
    ],
    "combination_tips": [
        "Pair white sneakers with jeans and a casual shirt for a relaxed look.",
        "A leather jacket works great with a floral dress for edgy chic.",
        "Denim on denim is trending: contrast light and dark washes.",
        "Use a scarf to add a pop of color to neutral outfits.",
        "Balance oversized pieces with fitted items for a flattering silhouette.",
    ],
}

# Streaming respond function used by gr.ChatInterface
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Build the conversation context from the system prompt and chat history
    messages = [{"role": "system", "content": system_message}]
    for user_message, bot_response in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if bot_response:
            messages.append({"role": "assistant", "content": bot_response})
    messages.append({"role": "user", "content": message})

    # If the user's message mentions a knowledge-base topic, answer directly from it.
    # Note: this function is a generator, so the canned answer must be yielded;
    # a plain `return value` inside a generator would never reach the UI.
    lowered_message = message.lower()
    for key, value in knowledge_base.items():
        # Match on the human-readable form of the key ("current focus", "fashion tips", ...)
        if key.replace("_", " ") in lowered_message:
            label = key.replace("_", " ").capitalize()
            if isinstance(value, list):
                yield f"{label}: {', '.join(value)}"
            else:
                yield f"{label}: {value}"
            return

    # Otherwise, stream a response from the Inference API
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final chunk may carry no content
            response += token
        yield response

# Chat interface
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value=default_system_message, label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
    title="Veshon - Your Fashion-Tech Assistant",
    description="Meet Veshon, the AI chatbot dedicated to solving challenges in the fashion industry and e-commerce. Ask anything about Veshup and its mission!",
    theme="default",
)

if __name__ == "__main__":
    demo.launch()
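
# Optional quick check of the knowledge-base shortcut, kept commented out so it does not
# interfere with launching the app. This is only a sketch: it assumes you run it in place
# of `demo.launch()`, and it works offline because a question about the "mission" is
# answered straight from `knowledge_base` without calling the Inference API.
#
# for partial in respond("What is your mission?", [], default_system_message, 256, 0.7, 0.95):
#     print(partial)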