# flowify-backend/executor/workflow.py
from nodes.scraping.html import *
from nodes.llm.textmodel import ai_response as generate_ai_response
from nodes.socialmedia.x import *
from nodes.socialmedia.reddit import reddit_post
from nodes.processing.list import *
import ast  # used to safely parse string representations of lists in create_list
# Updated the executor to process multiple nodes for better execution (Mon Jan 6 2025)
def execute(workflow):
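    """Execute a workflow dict of the form {'steps': [...]}.

    Each step carries a 'node' id, a 'type', and a 'config' dict. Config values
    that look like node ids (e.g. 'node-1') are resolved against the outputs of
    previously executed steps, which are collected in temp_data and returned.
    """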
print(workflow)
# Nodes that do not depend on any other node for input
unique_node = []
# Nodes that depend on other nodes for input
dependant_node = []
# Temporary storage for saving the output of the nodes
temp_data = []
    # Assigning unique or dependant nodes to their respective lists
for step in workflow['steps']:
# Check if 'config' contains a reference to other nodes
is_dependant = False
if 'config' in step and isinstance(step['config'], dict):
for key, value in step['config'].items():
# If the value contains a reference like 'node-1', 'node-2', etc.
if isinstance(value, str) and value.startswith('node-'):
is_dependant = True
break
if is_dependant:
dependant_node.append(step)
else:
unique_node.append(step)
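    # Execute steps in order, dispatching on 'type'; each step's output is stored
    # in temp_data so later steps can reference it by node id.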
for step in workflow['steps']:
print("executing step", step['node'], step['type'])
if step['type'] == "scrape_html":
for temp in temp_data:
if temp['node'] == step['config']['url']:
print(temp['data'])
data = scrape_html(temp['data'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = scrape_html(step['config']['url'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
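        # scrape_images: run the image scraper on an upstream node's output or on the literal config data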
elif step['type'] == "scrape_images":
for temp in temp_data:
if temp['node'] == step['config']['data']:
filter = step['config']['filter']
data = scrape_images(data=temp['data'], filter=filter)
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = step['config']['data']
filter = step['config']['filter']
response = scrape_images(data=data, filter=filter)
temp_data.append({'node': step['node'], 'data': response, 'type':step['type']})
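        # scrape_links: collect filtered links from an upstream node's output or from the URL given in the config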
elif step['type'] == "scrape_links":
for temp in temp_data:
if temp['node'] == step['config']['data']:
data = scrape_links(url = temp['data'], filter=step['config']['filter'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = scrape_links(url = step['config']['data'], filter=step['config']['filter'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
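        # scrape_metadata: currently delegates to scrape_text on an upstream node's output or the config data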
elif step['type'] == "scrape_metadata":
for temp in temp_data:
if temp['node'] == step['config']['data']:
data = scrape_text(temp['data'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
                data = scrape_text(step['config']['data'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
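        # scrape_text: extract plain text from an upstream node's output or from the config data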
elif step['type'] == "scrape_text":
for temp in temp_data:
if temp['node'] == step['config']['data']:
data = scrape_text(str(temp['data']))
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
                data = scrape_text(str(step['config']['data']))
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
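        # create_tweet: post a tweet whose text comes from an upstream node or directly from the config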
elif step['type'] == "create_tweet":
for temp in temp_data:
print("node", temp['node'])
if temp['node'] == step['config']['data']:
                    print('found temp node')
login = step['config']['login']
data = create_tweet(text=temp['data'], login=login)
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
                print('node not found')
data = step['config']['data']
login = step['config']['login']
data = create_tweet(text=data, login=login)
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
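        # create_reddit: submit a Reddit post; the body comes from an upstream node or the config, credentials from the config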
elif step['type'] == "create_reddit":
for temp in temp_data:
print("node", temp['node'])
if temp['node'] == step['config']['data']:
                    print('found temp node')
username = step['config']['username']
password = step['config']['password']
subreddit = step['config']['subreddit']
client_id = step['config']['id']
client_secret = step['config']['secret']
                    title = step['config']['title']
data = reddit_post(client_id, client_secret, username, password, subreddit, title, body=temp['data'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
username = step['config']['username']
password = step['config']['password']
subreddit = step['config']['subreddit']
client_id = step['config']['id']
client_secret = step['config']['secret']
                title = step['config']['title']
data = reddit_post(client_id, client_secret, username, password, subreddit, title, body=step['config']['data'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
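        # ai_response: call the text model with the configured prompt, appending an upstream node's output when one is referenced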
elif step["type"] == "ai_response":
for temp in temp_data:
if temp['node'] == step['config']['data']:
data = generate_ai_response(step['config']['prompt'] + str(temp['data']))
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = generate_ai_response(prompt=step['config']['prompt'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
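        # scrape_div: extract div contents (by class) from an upstream node's output or the config data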
elif step['type'] == "scrape_div":
for temp in temp_data:
if temp['node'] == step['config']['data']:
data = scrape_div(data=temp['data'], div=step['config']['class'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = step['config']['data']
class_ = step['config']['class']
data = scrape_div(data=data, div=class_)
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
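        # extract_element: pick an element out of a list produced by an upstream node or supplied in the config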
elif step['type'] == "extract_element":
for temp in temp_data:
if temp['node'] == step['config']['data']:
print(step['config']['data'])
data = extract_element(list_=temp['data'], index=step['config']['index'], value=step['config']['value'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
break
else:
data = extract_element(list_=step['config']['data'], index=step['config']['index'], value=step['config']['value'])
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
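        # create_list: take the configured data as a list, parsing string representations when possible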
elif step['type'] == "create_list":
data = step['config']['data']
            # Convert a string representation of a list into an actual list
            if isinstance(data, str):
                try:
                    data = ast.literal_eval(data)
                except (ValueError, SyntaxError):
                    pass
temp_data.append({'node': step['node'], 'data': data, 'type':step['type']})
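        # reddit_post: submit a Reddit post; title and body may each reference an upstream node's output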
elif step['type'] == "reddit_post":
title = step['config']['title']
content = step['config']['data']
# Check if title is from another node
for temp in temp_data:
if temp['node'] == title:
title = temp['data']
break
# Check if content is from another node
for temp in temp_data:
if temp['node'] == content:
content = temp['data']
break
data = reddit_post(
client_id=step['config']['client_id'],
client_secret=step['config']['client_secret'],
username=step['config']['username'],
password=step['config']['password'],
subreddit=step['config']['subreddit'],
title=title,
body=content
)
temp_data.append({'node': step['node'], 'data': data, 'type': step['type']})
return temp_data
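
# A minimal sketch of how execute() might be driven, assuming node ids like 'node-1'
# and the config keys handled above; this sample workflow is illustrative only and
# not part of the original project:
if __name__ == "__main__":
    sample_workflow = {
        'steps': [
            {'node': 'node-1', 'type': 'scrape_html', 'config': {'url': 'https://example.com'}},
            {'node': 'node-2', 'type': 'scrape_text', 'config': {'data': 'node-1'}},
            {'node': 'node-3', 'type': 'ai_response', 'config': {'prompt': 'Summarize: ', 'data': 'node-2'}},
        ]
    }
    for result in execute(sample_workflow):
        print(result['node'], result['type'])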