{"flow":{"id":36,"summary":"Do sentiment analysis on recent Reddit mentions","versions":[148,149,163],"created_by":"adam186","created_at":"2022-12-29T13:09:18.197Z","votes":0,"approved":true,"apps":["reddit","openai"],"value":{"modules":[{"id":"d","value":{"lock":"","path":"hub/409/reddit/trigger_everytime_a_new_item_post_on_reddit_matches_at_least_one_mention_","type":"rawscript","content":"import {\n  getState,\n  setState,\n} from \"https://deno.land/x/windmill@v1.88.1/mod.ts\";\n\n/**\n * @returns A list of posts that mentioned given phrases in ascending order\n * of date of publication.\n *\n * @param mentions Case **insensitive** phrases that should be searched for.\n */\nexport async function main(\n  subreddit: string,\n  mentions: string[],\n  max_number_of_posts: number = 100,\n  reset_state = false,\n) {\n  if (reset_state) {\n    await setState(\"\");\n  }\n\n  mentions = mentions?.map((m) => m.trim().toLowerCase())?.filter(Boolean);\n  if (!mentions?.length) {\n    throw Error(\"\\nAt least one mentioned phrase is needed to search for.\");\n  }\n\n  const url = new URL(`https://www.reddit.com/r/${subreddit}/new.json`);\n  if (max_number_of_posts < 1) max_number_of_posts = 1;\n  const limit = 100;\n  const processedPosts: Post[] = [];\n  let runLoop = true;\n  do {\n    const lastPostId = await getState();\n    url.searchParams.append(\"limit\", limit.toString());\n    lastPostId && url.searchParams.append(\"before\", lastPostId);\n    const result = await fetch(url);\n    if (!result.ok) {\n      throw Error(\"\\n\" + await result.text());\n    }\n    const data = (await result.json()).data;\n    const posts: Post[] = data.children.map(\n      ({ data: { name, title, selftext, url } }: { data: Post }) => {\n        return { name, title, selftext, url };\n      },\n    );\n    if (!posts.length) {\n      return processedPosts;\n    } else if (posts.length < limit) {\n      runLoop = false;\n    }\n\n    await setState(posts[0].name);\n    const temp: 
Post[] = [];\n    for (let i = 0; i < posts.length; i++) {\n      const post = posts[i];\n      const mention = mentions.find((mention) =>\n        post.title.toLowerCase().includes(mention)\n      ) ||\n        mentions.find((mention) =>\n          post.selftext.toLowerCase().includes(mention)\n        );\n      mention && temp.push(post);\n      if ((processedPosts.length + temp.length) >= max_number_of_posts) {\n        break;\n      }\n    }\n    processedPosts.push(...temp.reverse());\n    if (processedPosts.length >= max_number_of_posts) {\n      runLoop = false;\n    }\n  } while (runLoop);\n\n  return processedPosts;\n}\n\ninterface Post {\n  name: string;\n  title: string;\n  selftext: string;\n  url: string;\n}\n","language":"deno","input_transforms":{"mentions":{"expr":"flow_input.phrases","type":"javascript"},"subreddit":{"expr":"flow_input.subreddit","type":"javascript"},"reset_state":{"expr":"flow_input.reset_state","type":"javascript"},"max_number_of_posts":{"expr":"flow_input.max_posts","type":"javascript"}}},"summary":"Get Reddit mentions - trigger","stop_after_if":null,"input_transforms":{}},{"id":"b","value":{"path":"hub/402/openai/create_completion","type":"rawscript","content":"import type { Resource } from \"https://deno.land/x/windmill@v1.52.0/mod.ts\";\nimport { removeObjectEmptyFields } from 'https://deno.land/x/windmill_helpers@v1.0.0/mod.ts'\nimport { Configuration, CreateCompletionRequest, OpenAIApi } from \"npm:openai@3.1.0\"\n\n/**\n * You can read about the parameters at \n * https://beta.openai.com/docs/api-reference/completions/create\n */\nexport async function main(\n  auth: Resource<'openai'>,\n  model = 'text-davinci-003',\n  prompt?: string,\n  suffix?: string,\n  max_tokens?: number,\n  temperature?: number,\n  top_p?: number,\n  n?: number,\n  stream?: boolean,\n  logprobs?: number,\n  echo?: boolean,\n  stop?: string,\n  presence_penalty?: number,\n  frequency_penalty?: number,\n  best_of?: number,\n  logit_bias?: object\n) 
{\n  if(!prompt) {\n    throw Error('No mentions were found.')\n  }\n  const configuration = new Configuration({\n    apiKey: auth.api_key,\n    organization: auth.organization_id\n  });\n  const openai = new OpenAIApi(configuration);\n\n  const request = removeObjectEmptyFields({\n    model,\n    prompt,\n    suffix,\n    max_tokens,\n    temperature,\n    top_p,\n    n,\n    stream,\n    logprobs,\n    echo,\n    stop,\n    presence_penalty,\n    frequency_penalty,\n    best_of,\n    logit_bias\n  }) as CreateCompletionRequest\n  const response = await openai.createCompletion(request)\n  return response.data\n}\n","language":"deno","input_transforms":{"n":{"type":"static","value":null},"auth":{"expr":"flow_input.openai_auth","type":"javascript"},"echo":{"type":"static","value":false},"stop":{"type":"static","value":""},"model":{"type":"static","value":"text-davinci-003"},"top_p":{"type":"static","value":1},"prompt":{"expr":"results.d.length ? `Classify the sentiment of the posts based on how they relate to the following keywords.\nThe sentiments can only be one of these three: POSITIVE, NEUTRAL, NEGATIVE.\nPlace every sentiment rating on a new line.\n=======\nKEYWORDS:\n${flow_input.phrases.join(', ')}\n=======\nPOSTS:\n${results.d.map((m, i) => {\n  return `POST ${i + 1}. TITLE:\n${m.title.replaceAll('\"', '\\'')}\nPOST ${i + 1}. 
BODY:\n${m.selftext.replaceAll('\"', '\\'')}\n-------\n`\n})}\n=======\nSENTIMENT RATINGS:\n` : undefined","type":"javascript"},"stream":{"type":"static","value":false},"suffix":{"type":"static","value":""},"best_of":{"type":"static","value":null},"logprobs":{"type":"static","value":null},"logit_bias":{"type":"static","value":null},"max_tokens":{"type":"static","value":60},"temperature":{"type":"static","value":0},"presence_penalty":{"type":"static","value":0},"frequency_penalty":{"type":"static","value":0}}}},{"id":"c","value":{"lock":"","path":null,"type":"rawscript","content":"export async function main(mentions: Record<string, string>[], sentiments: string) {\n  const sentimentArray = sentiments.trim().split('\\n')\n  return mentions.map(({title, url}, i) => {\n    return {\n      title,\n      url,\n      sentiment: sentimentArray[i]\n    }\n  })\n}\n","language":"deno","input_transforms":{"mentions":{"expr":"results.d","type":"javascript"},"sentiments":{"expr":"results.b.choices[0].text","type":"javascript"}}},"summary":"Format results","stop_after_if":null,"input_transforms":{}}],"failure_module":null},"schema":{"type":"object","$schema":"https://json-schema.org/draft/2020-12/schema","required":["phrases","subreddit","openai_auth","max_posts"],"properties":{"phrases":{"type":"array","items":{"type":"string"},"format":"","description":""},"max_posts":{"type":"number","format":"","default":10,"description":"Maximum number of posts to check in one run. Setting this to a low number doesn't mean you'll lose out on posts, but that you'll need to run the flow more frequently."},"subreddit":{"type":"string","format":"","default":"","description":""},"openai_auth":{"type":"object","format":"resource-openai","default":"","description":""},"reset_state":{"type":"boolean","format":"","default":"","description":"The state stores the post that is chronologically the last and was already processed. 
This means that only posts submitted after the saved one will be checked."}}},"description":"Get recent posts from a subreddit that mention given phrases and do sentiment analysis on them.","recording":null,"vcreated_at":"2023-04-19T16:54:02.266Z","vcreated_by":"adam186","comments":[]}}