kevinhug commited on
Commit
5e4395c
·
1 Parent(s): 3b2c402
Files changed (2) hide show
  1. graphrag.py +5 -5
  2. requirements.txt +2 -2
graphrag.py CHANGED
@@ -13,26 +13,26 @@ from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
13
 
14
  llm = HuggingFaceInferenceAPI(temperature=0.2, model_name="meta-llama/Llama-3.2-1B")
15
 
16
- """
17
-
18
- # SEE: https://huggingface.co/docs/hub/security-tokens
19
- # We just need a token with read permissions for this demo
20
  HF_TOKEN= os.environ["HF_TOKEN"]
21
 
22
  from llama_index.llms.litellm import LiteLLM
23
  llm = LiteLLM("huggingface/meta-llama/Llama-3.2-1B")
 
24
 
25
  import networkx as nx
26
  import matplotlib.pyplot as plt
27
  import pandas as pd
28
  import numpy as np
29
-
30
  from langchain_experimental.graph_transformers import LLMGraphTransformer
31
  from langchain.chains import GraphQAChain
32
  from langchain_core.documents import Document
33
  from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
34
 
 
35
 
 
 
36
 
37
  customer="Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low."
38
 
 
13
 
14
  llm = HuggingFaceInferenceAPI(temperature=0.2, model_name="meta-llama/Llama-3.2-1B")
15
 
 
 
 
 
16
  HF_TOKEN= os.environ["HF_TOKEN"]
17
 
18
  from llama_index.llms.litellm import LiteLLM
19
  llm = LiteLLM("huggingface/meta-llama/Llama-3.2-1B")
20
+ """
21
 
22
  import networkx as nx
23
  import matplotlib.pyplot as plt
24
  import pandas as pd
25
  import numpy as np
26
+ from langchain_groq import ChatGroq
27
  from langchain_experimental.graph_transformers import LLMGraphTransformer
28
  from langchain.chains import GraphQAChain
29
  from langchain_core.documents import Document
30
  from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
31
 
32
+ GROQ_API_KEY = os.environ.get('GROQ_API_KEY')
33
 
34
+ # Set up the Groq LLM client
35
+ llm = ChatGroq(temperature=0, model_name='llama-3.1-8b-instant', groq_api_key=GROQ_API_KEY)
36
 
37
  customer="Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low."
38
 
requirements.txt CHANGED
@@ -11,7 +11,7 @@ llama-index
11
  faiss-cpu
12
  tavily-python
13
 
14
- llama-index-llms-litellm
15
 
16
  #llama-index-llms-huggingface-api
17
  #huggingface_hub[inference]
@@ -19,7 +19,7 @@ llama-index-llms-litellm
19
  networkx
20
  matplotlib
21
  langchain-experimental
22
- #langchain-groq
23
  langchain-community
24
  pandas
25
  #gradio-client
 
11
  faiss-cpu
12
  tavily-python
13
 
14
+ #llama-index-llms-litellm
15
 
16
  #llama-index-llms-huggingface-api
17
  #huggingface_hub[inference]
 
19
  networkx
20
  matplotlib
21
  langchain-experimental
22
+ langchain-groq
23
  langchain-community
24
  pandas
25
  #gradio-client