import requests  # only used by the commented-out HTTP-based approach below
# from flask_cors import CORS
from groq import Groq

# CORS()
# GROQ_API_URL = "https://api.groq.com/translate"  # replace this with the Groq API endpoint
GROQ_API_KEY = "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"  # put your API key here
client = Groq(api_key=GROQ_API_KEY)


def translate_texts_groq(texts, target_language="ar"):
    """
    Translates a list of texts to the target language using Groq.
    :param texts: List of texts to translate
    :param target_language: Target language code
    :return: List of translated texts
    """
    # headers = {
    #     "Authorization": f"Bearer {GROQ_API_KEY}",
    #     "Content-Type": "application/json"
    # }
    translated_texts = []
    # for text in texts:
    #     payload = {
    #         "source_language": "en",  # make sure the source language is set correctly
    #         "target_language": target_language,
    #         "text": text
    #     }
    #     response = requests.post(GROQ_API_URL, json=payload, headers=headers)
    #     if response.status_code == 200:
    #         translated_text = response.json().get("translated_text")
    #         translated_texts.append(translated_text)
    #     else:
    #         # handle the error here if needed
    #         translated_texts.append(text)  # keep the original text if translation fails
    for text in texts:
        # NOTE: the prompt currently targets Arabic regardless of target_language
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": f"Translate the following text into Arabic: {text}",
                }
            ],
            model="llama3-8b-8192",
        )
        translated_texts.append(chat_completion.choices[0].message.content)
    return translated_texts
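# Illustrative usage (a sketch, not part of the original file; assumes a valid Groq API key
# and network access):
#     snippets = ["Hello world", "Getting started"]
#     arabic_snippets = translate_texts_groq(snippets)
#     # arabic_snippets then holds one translated string per input snippet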
def translate_content(content, target_language):
    """
    Translates the content of a markdown file, preserving non-translatable parts.
    :param content: Markdown content
    :param target_language: Target language code
    :return: Translated content
    """
    translatable_texts = extract_translatable_text(content)
    translated_texts = translate_texts_groq(translatable_texts, target_language)
    # Reconstruct the content with translated texts
    for original, translated in zip(translatable_texts, translated_texts):
        content = content.replace(original, translated)
    return content
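# extract_translatable_text is called above but not defined in this file; it is assumed to be
# provided elsewhere in the Space. The following is only a hypothetical minimal sketch of such
# a helper (an assumption, not the original implementation): it skips fenced code blocks and
# collects the remaining non-empty markdown lines for translation.
def extract_translatable_text(content):
    translatable = []
    in_code_block = False
    for line in content.splitlines():
        stripped = line.strip()
        if stripped.startswith("```"):
            in_code_block = not in_code_block  # toggle on opening/closing fences
            continue
        if not in_code_block and stripped:
            translatable.append(line)
    return translatable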
# from transformers import MarianMTModel, MarianTokenizer

# # Initialize the model and tokenizer
# model_name = 'Helsinki-NLP/opus-mt-en-<target_language>'  # Replace <target_language> with the target language code
# model = MarianMTModel.from_pretrained(model_name)
# tokenizer = MarianTokenizer.from_pretrained(model_name)

# def translate_texts(texts, target_language):
#     """
#     Translates a list of texts to the target language.
#     :param texts: List of texts to translate
#     :param target_language: Target language code
#     :return: List of translated texts
#     """
#     translated_texts = []
#     for text in texts:
#         inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
#         translated = model.generate(**inputs)
#         translated_text = tokenizer.decode(translated[0], skip_special_tokens=True)
#         translated_texts.append(translated_text)
#     return translated_texts

# def translate_content(content, target_language):
#     """
#     Translates the content of a markdown file, preserving non-translatable parts.
#     :param content: Markdown content
#     :param target_language: Target language code
#     :return: Translated content
#     """
#     translatable_texts = extract_translatable_text(content)
#     translated_texts = translate_texts(translatable_texts, target_language)
#     # Reconstruct the content with translated texts
#     for original, translated in zip(translatable_texts, translated_texts):
#         content = content.replace(original, translated)
#     return content
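# End-to-end usage sketch (illustrative only; the file names below are hypothetical assumptions):
if __name__ == "__main__":
    with open("README.md", encoding="utf-8") as src:
        markdown = src.read()
    translated_markdown = translate_content(markdown, target_language="ar")
    with open("README.ar.md", "w", encoding="utf-8") as dst:
        dst.write(translated_markdown)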