import json
import logging
import os
from typing import Any, Dict, List, Optional

import together
from pydantic import Extra  # , root_validator, model_validator

from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from langchain.utils import get_from_dict_or_env
class TogetherLLM(LLM):
    """Together large language models."""

    model_name: str = "togethercomputer/llama-2-70b-chat"
    """Model endpoint to use."""

    # Fall back to an empty string so importing this module does not raise
    # KeyError when TOGETHER_API_KEY is not set in the environment.
    together_api_key: str = os.environ.get("TOGETHER_API_KEY", "")
    """Together API key."""

    temperature: float = 0.7
    """What sampling temperature to use."""

    max_tokens: int = 512
    """The maximum number of tokens to generate in the completion."""

    class Config:
        extra = Extra.forbid

    #@model_validator(mode="after")
    #def validate_environment(cls, values: Dict) -> Dict:
    #    """Validate that the API key is set."""
    #    api_key = get_from_dict_or_env(
    #        values, "together_api_key", "TOGETHER_API_KEY"
    #    )
    #    values["together_api_key"] = api_key
    #    return values

    @property
    def _llm_type(self) -> str:
        """Return type of LLM."""
        return "together"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to the Together completion endpoint."""
        together.api_key = self.together_api_key
        output = together.Complete.create(
            prompt,
            model=self.model_name,
            max_tokens=self.max_tokens,
            temperature=self.temperature,
        )
        text = output["output"]["choices"][0]["text"]
        # Truncate at any stop sequences the caller provided.
        if stop is not None:
            text = enforce_stop_tokens(text, stop)
        return text

    def extractJson(self, val: str) -> Any:
        """Helper to extract JSON from this LLM's output.

        Assumes the JSON is the first item inside ``` fences.
        """
        # Normalise the opening fence, then take the first fenced block.
        block = val.replace("```json", "```").split("```")[1]
        # Remove stray newlines before parsing.
        block = block.replace("\n", "").replace("\r", "")
        return json.loads(block)
    
    def extractPython(self, val: str) -> str:
        """Helper to extract Python code from this LLM's output.

        Assumes the code is the first item inside ``` fences.
        """
        # Normalise the opening fence, then take the first fenced block.
        return val.replace("```python", "```").split("```")[1]
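

# Usage sketch (illustrative only): wires TogetherLLM into a simple completion
# call, assuming TOGETHER_API_KEY is set and the `together` and `langchain`
# packages are installed. The prompt text and parameter values below are
# arbitrary examples, not part of the class above.
if __name__ == "__main__":
    llm = TogetherLLM(
        model_name="togethercomputer/llama-2-70b-chat",
        temperature=0.1,
        max_tokens=256,
    )
    raw = llm(
        "Return a JSON object with keys 'name' and 'language' inside ```json fences."
    )
    print(raw)
    # If the model wrapped its answer in ``` fences, extractJson parses it.
    try:
        print(llm.extractJson(raw))
    except (IndexError, json.JSONDecodeError):
        print("No parseable JSON block found in the model output.")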