diff --git a/crystallib/clients/canva/readme.md b/crystallib/clients/canva/readme.md index 43aaf1a62..08c0e708b 100644 --- a/crystallib/clients/canva/readme.md +++ b/crystallib/clients/canva/readme.md @@ -1,28 +1,45 @@ -# canva +# Canva Module -> TODO: this module needs to be finished, just rough start - -To get started - -```vlang +This module provides a V client for interacting with the Canva API, enabling programmatic access to Canva's platform features. +## Setup +1. Create an account on [Canva Developer Portal](https://www.canva.com/developers/) +2. Create or select your application +3. Generate an API token +4. Configure the client using heroscript: +```v import freeflowuniverse.crystallib.clients.canva -mut client:= canva.get()! +heroscript := " +!!canva.configure + name:'my_instance' + secret:'your-api-token-here' +" -client... +// Apply the configuration (only needs to be done once) +canva.play(heroscript: heroscript)! +``` +## Usage +### Initialize Client +```v +// Get a configured client instance +mut cl := canva.get(name: 'my_instance')! ``` -## example heroscript +### Examples -```hero -!!canva.configure - secret: '...' -``` +#### Download a Design +```v +// Get client instance +mut cl := canva.get('my_instance')! +// Download a design by ID +design_result := cl.download('your-design-id')! +println('Design downloaded: ${design_result}') +``` diff --git a/crystallib/clients/httpconnection/readme.md b/crystallib/clients/httpconnection/readme.md index 97a9c603f..e26c463fc 100644 --- a/crystallib/clients/httpconnection/readme.md +++ b/crystallib/clients/httpconnection/readme.md @@ -117,55 +117,3 @@ fn delete_ssh_key(mut conn HTTPConnection, fingerprint string) ! { )! } ``` - -## Custom Headers - -You can set default headers for all requests or specify headers for individual requests: - -```v -import net.http { Header } - -// Set default headers for all requests -conn.default_header = http.new_header( - key: .authorization - value: 'Bearer your-token-here' -) - -// Add custom headers for specific request -response := conn.get_json( - method: .get - prefix: 'protected/resource' - header: http.new_header( - key: .content_type - value: 'application/json' - ) -)! -``` - -## Error Handling - -The module uses V's built-in error handling. All methods that can fail return a Result type: - -```v -// Handle potential errors -user := conn.get_json_generic[User]( - method: .get - prefix: 'users/1' -) or { - println('Error: ${err}') - return -} -``` - -## Cache Configuration - -The module supports caching of responses. Configure caching behavior through the `CacheConfig` struct: - -```v -mut conn := HTTPConnection{ - base_url: 'https://api.example.com' - cache: CacheConfig{ - enabled: true - // Add other cache configuration as needed - } -} diff --git a/crystallib/clients/httpconnection/request.v b/crystallib/clients/httpconnection/request.v index 95837f446..99a5d97f5 100644 --- a/crystallib/clients/httpconnection/request.v +++ b/crystallib/clients/httpconnection/request.v @@ -23,3 +23,19 @@ pub mut: debug bool dataformat DataFormat } + + + + +// // set a custom hdeader on the request +// // ```v +// // import net.http { Header } +// // header: http.new_header( +// // key: .content_type +// // value: 'application/json' +// // ) +// // )! 
+// // ``` +// fn (mut r Request) header_set(header Header) { +// r.header = header +// } diff --git a/crystallib/clients/openai/.heroscript b/crystallib/clients/openai/.heroscript new file mode 100644 index 000000000..ff71c5469 --- /dev/null +++ b/crystallib/clients/openai/.heroscript @@ -0,0 +1,8 @@ + +!!hero_code.generate_client + name:'openai' + classname:'OpenAIClient' + singleton:0 + default:1 + hasconfig:1 + reset:0 \ No newline at end of file diff --git a/crystallib/clients/openai/actions.v b/crystallib/clients/openai/actions.v deleted file mode 100644 index d0a561e91..000000000 --- a/crystallib/clients/openai/actions.v +++ /dev/null @@ -1,23 +0,0 @@ -module openai - -// run heroscript starting from path, text or giturl -//``` -// !!OpenAIclient.define -// name:'default' -// openaikey: '' -// description:'...' -//``` -pub fn heroplay(mut plbook playbook.PlayBook) ! { - for mut action in plbook.find(filter: 'openaiclient.define')! { - mut p := action.params - instance := p.get_default('instance', 'default')! - // cfg.keyname = p.get('keyname')! - mut cl := get(instance, - openaikey: p.get('openaikey')! - description: p.get_default('description', '')! - )! - cl.config_save()! - } -} - -//>TODO: this needs to be extended to chats, ... diff --git a/crystallib/clients/openai/audio.v b/crystallib/clients/openai/audio.v index 4fc0a2d3d..1df1bd7c4 100644 --- a/crystallib/clients/openai/audio.v +++ b/crystallib/clients/openai/audio.v @@ -102,7 +102,8 @@ fn (mut f OpenAIClient[Config]) create_audio_request(args AudioArgs, endpoint st req := httpconnection.Request{ prefix: endpoint } - r := f.connection.post_multi_part(req, form)! + mut conn := f.connection()! + r := conn.post_multi_part(req, form)! if r.status_code != 200 { return error('got error from server: ${r.body}') } diff --git a/crystallib/clients/openai/completions.v b/crystallib/clients/openai/completions.v index 8ed34b22b..91496e750 100644 --- a/crystallib/clients/openai/completions.v +++ b/crystallib/clients/openai/completions.v @@ -63,7 +63,8 @@ pub fn (mut f OpenAIClient[Config]) chat_completion(model_type ModelType, msgs M m.messages << mr } data := json.encode(m) - r := f.connection.post_json_str(prefix: 'chat/completions', data: data)! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'chat/completions', data: data)! res := json.decode(ChatCompletion, r)! return res diff --git a/crystallib/clients/openai/embeddings.v b/crystallib/clients/openai/embeddings.v index f179e2910..e2cb33c70 100644 --- a/crystallib/clients/openai/embeddings.v +++ b/crystallib/clients/openai/embeddings.v @@ -49,6 +49,7 @@ pub fn (mut f OpenAIClient[Config]) create_embeddings(args EmbeddingCreateArgs) user: args.user } data := json.encode(req) - r := f.connection.post_json_str(prefix: 'embeddings', data: data)! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'embeddings', data: data)! return json.decode(EmbeddingResponse, r)! 
} diff --git a/crystallib/clients/openai/factory.v b/crystallib/clients/openai/factory.v deleted file mode 100644 index 747f101fc..000000000 --- a/crystallib/clients/openai/factory.v +++ /dev/null @@ -1,64 +0,0 @@ -module openai - -import freeflowuniverse.crystallib.core.base -import freeflowuniverse.crystallib.core.playbook -import freeflowuniverse.crystallib.ui as gui -import freeflowuniverse.crystallib.clients.httpconnection - -// import freeflowuniverse.crystallib.ui.console - -pub struct OpenAIClient[T] { - base.BaseConfig[T] -pub mut: - connection &httpconnection.HTTPConnection -} - -@[params] -pub struct Config { -pub mut: - openaikey string @[secret] - description string -} - -pub fn get(instance string, cfg Config) !OpenAIClient[Config] { - mut self := OpenAIClient[Config]{ - connection: &httpconnection.HTTPConnection{} - } - - if cfg.openaikey.len > 0 { - // first the type of the instance, then name of instance, then action - self.init('openaiclient', instance, .set, cfg)! - } else { - self.init('openaiclient', instance, .get)! - } - - mut conn := httpconnection.new( - name: 'openai' - url: 'https://api.openai.com/v1/' - )! - conn.default_header.add(.authorization, 'Bearer ${self.config()!.openaikey}') - // req.add_custom_header('x-disable-pagination', 'True') ! - - self.connection = conn - return self -} - -// get a new OpenAI client, will create if it doesn't exist or ask for new configuration -pub fn configure(instance_ string) ! { - mut cfg := Config{} - mut ui := gui.new()! - - mut instance := instance_ - if instance == '' { - instance = ui.ask_question( - question: 'name for Dagu client' - default: instance - )! - } - - cfg.openaikey = ui.ask_question( - question: '\nPlease specify your openai secret (instance:${instance}).' - )! - - get(instance, cfg)! -} diff --git a/crystallib/clients/openai/files.v b/crystallib/clients/openai/files.v index 718b61bdb..071257d04 100644 --- a/crystallib/clients/openai/files.v +++ b/crystallib/clients/openai/files.v @@ -58,7 +58,8 @@ pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File { req := httpconnection.Request{ prefix: 'files' } - r := f.connection.post_multi_part(req, form)! + mut conn := f.connection()! + r := conn.post_multi_part(req, form)! if r.status_code != 200 { return error('got error from server: ${r.body}') } @@ -67,24 +68,28 @@ pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File { // list all files in client org pub fn (mut f OpenAIClient[Config]) list_files() !Files { - r := f.connection.get(prefix: 'files')! + mut conn := f.connection()! + r := conn.get(prefix: 'files')! return json.decode(Files, r)! } // deletes a file pub fn (mut f OpenAIClient[Config]) delete_file(file_id string) !DeleteResp { - r := f.connection.delete(prefix: 'files/' + file_id)! + mut conn := f.connection()! + r := conn.delete(prefix: 'files/' + file_id)! return json.decode(DeleteResp, r)! } // returns a single file metadata pub fn (mut f OpenAIClient[Config]) get_file(file_id string) !File { - r := f.connection.get(prefix: 'files/' + file_id)! + mut conn := f.connection()! + r := conn.get(prefix: 'files/' + file_id)! return json.decode(File, r)! } // returns the content of a specific file pub fn (mut f OpenAIClient[Config]) get_file_content(file_id string) !string { - r := f.connection.get(prefix: 'files/' + file_id + '/content')! + mut conn := f.connection()! + r := conn.get(prefix: 'files/' + file_id + '/content')! 
return r } diff --git a/crystallib/clients/openai/fine_tunes.v b/crystallib/clients/openai/fine_tunes.v index 99dce686f..f9a579b0e 100644 --- a/crystallib/clients/openai/fine_tunes.v +++ b/crystallib/clients/openai/fine_tunes.v @@ -63,31 +63,36 @@ pub mut: // creates a new fine-tune based on an already uploaded file pub fn (mut f OpenAIClient[Config]) create_fine_tune(args FineTuneCreateArgs) !FineTune { data := json.encode(args) - r := f.connection.post_json_str(prefix: 'fine-tunes', data: data)! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'fine-tunes', data: data)! return json.decode(FineTune, r)! } // returns all fine-tunes in this account pub fn (mut f OpenAIClient[Config]) list_fine_tunes() !FineTuneList { - r := f.connection.get(prefix: 'fine-tunes')! + mut conn := f.connection()! + r := conn.get(prefix: 'fine-tunes')! return json.decode(FineTuneList, r)! } // get a single fine-tune information pub fn (mut f OpenAIClient[Config]) get_fine_tune(fine_tune string) !FineTune { - r := f.connection.get(prefix: 'fine-tunes/' + fine_tune)! + mut conn := f.connection()! + r := conn.get(prefix: 'fine-tunes/' + fine_tune)! return json.decode(FineTune, r)! } // cancel a fine-tune that didn't finish yet pub fn (mut f OpenAIClient[Config]) cancel_fine_tune(fine_tune string) !FineTune { - r := f.connection.post_json_str(prefix: 'fine-tunes/' + fine_tune + '/cancel')! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'fine-tunes/' + fine_tune + '/cancel')! return json.decode(FineTune, r)! } // returns all events for a fine tune in this account pub fn (mut f OpenAIClient[Config]) list_fine_tune_events(fine_tune string) !FineTuneEventList { - r := f.connection.get(prefix: 'fine-tunes/' + fine_tune + '/events')! + mut conn := f.connection()! + r := conn.get(prefix: 'fine-tunes/' + fine_tune + '/events')! return json.decode(FineTuneEventList, r)! } diff --git a/crystallib/clients/openai/images.v b/crystallib/clients/openai/images.v index 03097a98e..1165efafa 100644 --- a/crystallib/clients/openai/images.v +++ b/crystallib/clients/openai/images.v @@ -106,7 +106,8 @@ pub fn (mut f OpenAIClient[Config]) create_image(args ImageCreateArgs) !Images { user: args.user } data := json.encode(request) - r := f.connection.post_json_str(prefix: 'images/generations', data: data)! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'images/generations', data: data)! return json.decode(Images, r)! } @@ -148,7 +149,8 @@ pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Image req := httpconnection.Request{ prefix: 'images/edits' } - r := f.connection.post_multi_part(req, form)! + mut conn := f.connection()! + r := conn.post_multi_part(req, form)! if r.status_code != 200 { return error('got error from server: ${r.body}') } @@ -181,7 +183,8 @@ pub fn (mut f OpenAIClient[Config]) create_variation_image(args ImageVariationAr req := httpconnection.Request{ prefix: 'images/variations' } - r := f.connection.post_multi_part(req, form)! + mut conn := f.connection()! + r := conn.post_multi_part(req, form)! 
if r.status_code != 200 { return error('got error from server: ${r.body}') } diff --git a/crystallib/clients/openai/model_enums.v b/crystallib/clients/openai/model_enums.v index ed3905990..b28a620e6 100644 --- a/crystallib/clients/openai/model_enums.v +++ b/crystallib/clients/openai/model_enums.v @@ -1,49 +1,39 @@ module openai pub enum ModelType { - gpt_3_5_turbo + gpt_4_1106_preview + gpt_4_vision_preview gpt_4 - gpt_4_0613 gpt_4_32k - gpt_4_32k_0613 - gpt_3_5_turbo_0613 + gpt_3_5_turbo_1106 + gpt_3_5_turbo gpt_3_5_turbo_16k - gpt_3_5_turbo_16k_0613 whisper_1 } fn modelname_str(e ModelType) string { - if e == .gpt_4 { - return 'gpt-4' - } - if e == .gpt_3_5_turbo { - return 'gpt-3.5-turbo' - } return match e { - .gpt_4 { - 'gpt-4' + .gpt_4_1106_preview { + 'gpt-4-1106-preview' } - .gpt_3_5_turbo { - 'gpt-3.5-turbo' + .gpt_4_vision_preview { + 'gpt-4-vision-preview' } - .gpt_4_0613 { - 'gpt-4-0613' + .gpt_4 { + 'gpt-4' } .gpt_4_32k { 'gpt-4-32k' } - .gpt_4_32k_0613 { - 'gpt-4-32k-0613' + .gpt_3_5_turbo_1106 { + 'gpt-3.5-turbo-1106' } - .gpt_3_5_turbo_0613 { - 'gpt-3.5-turbo-0613' + .gpt_3_5_turbo { + 'gpt-3.5-turbo' } .gpt_3_5_turbo_16k { 'gpt-3.5-turbo-16k' } - .gpt_3_5_turbo_16k_0613 { - 'gpt-3.5-turbo-16k-0613' - } .whisper_1 { 'whisper-1' } diff --git a/crystallib/clients/openai/models.v b/crystallib/clients/openai/models.v index 70ce1b77c..e15c09099 100644 --- a/crystallib/clients/openai/models.v +++ b/crystallib/clients/openai/models.v @@ -35,12 +35,14 @@ pub mut: // list current models available in Open AI pub fn (mut f OpenAIClient[Config]) list_models() !Models { - r := f.connection.get(prefix: 'models')! + mut conn := f.connection()! + r := conn.get(prefix: 'models')! return json.decode(Models, r)! } // returns details of a model using the model id pub fn (mut f OpenAIClient[Config]) get_model(model string) !Model { - r := f.connection.get(prefix: 'models/' + model)! + mut conn := f.connection()! + r := conn.get(prefix: 'models/' + model)! return json.decode(Model, r)! } diff --git a/crystallib/clients/openai/moderation.v b/crystallib/clients/openai/moderation.v index bf63b39a1..56dc643aa 100644 --- a/crystallib/clients/openai/moderation.v +++ b/crystallib/clients/openai/moderation.v @@ -75,6 +75,7 @@ pub fn (mut f OpenAIClient[Config]) create_moderation(input string, model Modera model: moderation_model_str(model) } data := json.encode(req) - r := f.connection.post_json_str(prefix: 'moderations', data: data)! + mut conn := f.connection()! + r := conn.post_json_str(prefix: 'moderations', data: data)! return json.decode(ModerationResponse, r)! } diff --git a/crystallib/clients/openai/openai_factory_.v b/crystallib/clients/openai/openai_factory_.v new file mode 100644 index 000000000..836f19098 --- /dev/null +++ b/crystallib/clients/openai/openai_factory_.v @@ -0,0 +1,118 @@ + +module openai + +import freeflowuniverse.crystallib.core.base +import freeflowuniverse.crystallib.core.playbook + + +__global ( + openai_global map[string]&OpenAIClient + openai_default string +) + +/////////FACTORY + +@[params] +pub struct ArgsGet{ +pub mut: + name string = "default" +} + +fn args_get (args_ ArgsGet) ArgsGet { + mut args:=args_ + if args.name == ""{ + args.name = openai_default + } + if args.name == ""{ + args.name = "default" + } + return args +} + +pub fn get(args_ ArgsGet) !&OpenAIClient { + mut args := args_get(args_) + if !(args.name in openai_global) { + if ! config_exists(){ + if default{ + config_save()! + } + } + config_load()! 
+ } + return openai_global[args.name] or { + println(openai_global) + panic("bug in get from factory: ") + } +} + + + +fn config_exists(args_ ArgsGet) bool { + mut args := args_get(args_) + mut context:=base.context() or { panic("bug") } + return context.hero_config_exists("openai",args.name) +} + +fn config_load(args_ ArgsGet) ! { + mut args := args_get(args_) + mut context:=base.context()! + mut heroscript := context.hero_config_get("openai",args.name)! + play(heroscript:heroscript)! +} + +fn config_save(args_ ArgsGet) ! { + mut args := args_get(args_) + mut context:=base.context()! + context.hero_config_set("openai",args.name,heroscript_default()!)! +} + + +fn set(o OpenAIClient)! { + mut o2:=obj_init(o)! + openai_global["default"] = &o2 +} + + +@[params] +pub struct PlayArgs { +pub mut: + name string = 'default' + heroscript string //if filled in then plbook will be made out of it + plbook ?playbook.PlayBook + reset bool + start bool + stop bool + restart bool + delete bool + configure bool //make sure there is at least one installed +} + +pub fn play(args_ PlayArgs) ! { + + mut args:=args_ + + if args.heroscript == "" { + args.heroscript = heroscript_default()! + } + mut plbook := args.plbook or { + playbook.new(text: args.heroscript)! + } + + mut install_actions := plbook.find(filter: 'openai.configure')! + if install_actions.len > 0 { + for install_action in install_actions { + mut p := install_action.params + mycfg:=cfg_play(p)! + set(mycfg)! + } + } + +} + + + + +//switch instance to be used for openai +pub fn switch(name string) { + openai_default = name +} diff --git a/crystallib/clients/openai/openai_model.v b/crystallib/clients/openai/openai_model.v new file mode 100644 index 000000000..333b03fc2 --- /dev/null +++ b/crystallib/clients/openai/openai_model.v @@ -0,0 +1,57 @@ +module openai +import freeflowuniverse.crystallib.data.paramsparser +import freeflowuniverse.crystallib.clients.httpconnection + +pub const version = '1.0.0' +const singleton = false +const default = true + +pub fn heroscript_default() !string { + heroscript := " + !!openai.configure + name:'openai' + openaikey:'your-api-key-here' + description:'OpenAI API Client' + " + return heroscript +} + +@[heap] +pub struct OpenAIClient { +pub mut: + name string = 'default' + openaikey string @[secret] + description string + conn ?&httpconnection.HTTPConnection +} + +fn cfg_play(p paramsparser.Params) ! { + mut mycfg := OpenAIClient{ + name: p.get_default('name', 'default')! + openaikey: p.get('openaikey')! + description: p.get_default('description', '')! + } + set(mycfg)! +} + +fn obj_init(obj_ OpenAIClient)!OpenAIClient { + mut obj := obj_ + return obj +} + + +pub fn (mut client OpenAIClient) connection() !&httpconnection.HTTPConnection { + mut c := client.conn or { + mut c2 := httpconnection.new( + name: 'openaiclient_${client.name}' + url: 'https://api.openai.com/v1' + cache: false + retry: 0 + )! + c2.basic_auth(h.user, h.password) + c2 + } + c.headers['Authorization'] = 'Bearer ${client.openaikey}' + client.conn = c + return c +} diff --git a/crystallib/clients/openai/readme.md b/crystallib/clients/openai/readme.md index ed02489ef..fd4c3f44b 100644 --- a/crystallib/clients/openai/readme.md +++ b/crystallib/clients/openai/readme.md @@ -1,50 +1,54 @@ -# OpenAI +# OpenAI Client Module -An implementation of an OpenAI client using Vlang. +This module provides a V client for interacting with OpenAI's API, allowing you to integrate OpenAI's services into your V applications. 
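+
+As a quick orientation, here is a minimal chat-completion sketch; it assumes the client has already been configured as described in the Setup section below, and it only uses calls that this module already exposes:
+
+```v
+import freeflowuniverse.crystallib.clients.openai
+
+// get the configured client instance (configuration is covered in Setup below)
+mut client := openai.get()!
+
+// build a single user message and request a chat completion
+mut msg := []openai.Message{}
+msg << openai.Message{
+    role: openai.RoleType.user
+    content: 'Say this is a test!'
+}
+mut msgs := openai.Messages{
+    messages: msg
+}
+res := client.chat_completion(openai.ModelType.gpt_3_5_turbo, msgs)!
+println(res)
+```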
-## Supported methods +## Setup -- List available models -- Chat Completion -- Translate Audio -- Transcribe Audio -- Create image based on prompt -- Edit an existing image -- Create variation of an image +1. Get your API key from [OpenAI Platform](https://platform.openai.com/api-keys) +2. Configure the client using heroscript: -## Usage +```v +heroscript := " +!!openai.configure + name:'default' + openaikey:'your-api-key' // Required: your OpenAI API key + description:'My OpenAI API' // Optional +" + +// Apply the configuration (only needs to be done once) +openai.play(heroscript: heroscript)! +``` -To use the client you need a OpenAi key which can be generated from [here](https://platform.openai.com/account/api-keys). +## Usage -The key should be exposed in an environment variable as following: +### Initialize Client +```v +// Get a configured client instance +mut client := openai.get(name: 'something')! -```bash -export OPENAI_API_KEY= +// Or use default instance if name wasn't specified in configuration +mut client := openai.get()! ``` -To get a new instance of the client: +### Examples -```v -import freeflowuniverse.crystallib.clients.openai +> see examples/clients/openai -ai_cli := openai.new()! -``` +### Complete Example -Then it is possible to perform all the listed operations: +Here's a complete example showing common operations: ```v -// listing models -models := ai_cli.list_models()! +#!/usr/bin/env -S v run -// creating a new chat completion +import freeflowuniverse.crystallib.clients.openai -mut msg := []op.Message{} -msg << op.Message{ - role: op.RoleType.user - content: 'Say this is a test!' +fn main() { + // Get client instance (uses default if no name specified) + mut client := openai.get()! + + // Your OpenAI API operations here + // (Add specific operation examples once implemented) } -mut msgs := op.Messages{ - messages: msg -} -res := ai_cli.chat_completion(op.ModelType.gpt_3_5_turbo, msgs)! ``` + diff --git a/crystallib/develop/gittools/repository.v b/crystallib/develop/gittools/repository.v index 84b312403..c427064eb 100644 --- a/crystallib/develop/gittools/repository.v +++ b/crystallib/develop/gittools/repository.v @@ -3,7 +3,6 @@ module gittools import freeflowuniverse.crystallib.ui.console import freeflowuniverse.crystallib.osal import os -import time // GitRepo holds information about a single Git repository. @[heap] diff --git a/crystallib/virt/hetzner/readme.md b/crystallib/virt/hetzner/readme.md index cacba0117..d727a0858 100644 --- a/crystallib/virt/hetzner/readme.md +++ b/crystallib/virt/hetzner/readme.md @@ -7,6 +7,8 @@ This module provides a V client for interacting with Hetzner's Robot API, allowi 1. Create an account on [Hetzner Robot](https://robot.hetzner.com/preferences/index) 2. 
Configure the client using heroscript: ```v +import freeflowuniverse.crystallib.virt.hetzner + heroscript := " !!hetzner.configure name:'my_instance' diff --git a/examples/develop/openai/.gitignore b/examples/clients/openai/.gitignore similarity index 100% rename from examples/develop/openai/.gitignore rename to examples/clients/openai/.gitignore diff --git a/examples/clients/openai/openai_example.vsh b/examples/clients/openai/openai_example.vsh new file mode 100755 index 000000000..ede68512a --- /dev/null +++ b/examples/clients/openai/openai_example.vsh @@ -0,0 +1,44 @@ +#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run + +import freeflowuniverse.crystallib.clients.openai +import freeflowuniverse.crystallib.ui.console +import freeflowuniverse.crystallib.core.base + +console.print_header('OPENAI Example.') + +// Check for required environment variables +key := os.getenv('OPENAIKEY') +if key == '' { + eprintln('Error: OPENAIKEY environment variable is not set') + eprintln('Please set it using: export OPENAIKEY=your-yourkey') + exit(1) +} + +heroscript := " +!!openai.configure + name:'default' + openaikey:'${key}' +" + +openai.play(heroscript: heroscript)! + + +mut ai := openai.get()! + +models := ai.list_models()! + +println(models) + + +// mut msg := []openai.Message{} +// msg << openai.Message{ +// role: openai.RoleType.user +// content: 'Say this is a test!' +// } +// mut msgs := openai.Messages{ +// messages: msg +// } +// res := ai.chat_completion(openai.ModelType.gpt_3_5_turbo, msgs)! +// print(res) + + diff --git a/examples/clients/openai/openai_example_full.vsh b/examples/clients/openai/openai_example_full.vsh new file mode 100644 index 000000000..87d1ed281 --- /dev/null +++ b/examples/clients/openai/openai_example_full.vsh @@ -0,0 +1,97 @@ +#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run + +import freeflowuniverse.crystallib.clients.openai as openai +import freeflowuniverse.crystallib.ui.console +import freeflowuniverse.crystallib.core.base + +console.print_header('OPENAI Example.') + +// Check for required environment variables +key := os.getenv('OPENAIKEY') +if key == '' { + eprintln('Error: OPENAIKEY environment variable is not set') + eprintln('Please set it using: export OPENAIKEY=your-yourkey') + exit(1) +} + +heroscript := " +!!openai.configure + name:'default' + openaikey:'${key}' +" + +openai.play(heroscript: heroscript)! + + +mut ai_cli := openai.get()! + +mut msg := []openai.Message{} +msg << openai.Message{ + role: openai.RoleType.user + content: 'Say this is a test!' +} +mut msgs := openai.Messages{ + messages: msg +} +res := ai_cli.chat_completion(openai.ModelType.gpt_3_5_turbo, msgs)! +print(res) + +models := ai_cli.list_models()! + +model := ai_cli.get_model(models.data[0].id)! +print(model) +images_created := ai_cli.create_image(openai.ImageCreateArgs{ + prompt: 'Calm weather' + num_images: 2 + size: openai.ImageSize.size_512_512 + format: openai.ImageRespType.url +})! +print(images_created) +images_updated := ai_cli.create_edit_image(openai.ImageEditArgs{ + image_path: '/path/to/image.png' + mask_path: '/path/to/mask.png' + prompt: 'Calm weather' + num_images: 2 + size: openai.ImageSize.size_512_512 + format: openai.ImageRespType.url +})! +print(images_updated) +images_variatons := ai_cli.create_variation_image(openai.ImageVariationArgs{ + image_path: '/path/to/image.png' + num_images: 2 + size: openai.ImageSize.size_512_512 + format: openai.ImageRespType.url +})! 
+print(images_variatons) + +transcription := ai_cli.create_transcription(openai.AudioArgs{ + filepath: '/path/to/audio' +})! +print(transcription) + +translation := ai_cli.create_tranlation(openai.AudioArgs{ + filepath: '/path/to/audio' +})! +print(translation) + +file_upload := ai_cli.upload_file(filepath: '/path/to/file.jsonl', purpose: 'fine-tune') +print(file_upload) +files := ai_cli.list_filess()! +print(files) +resp := ai_cli.create_fine_tune(training_file: file.id, model: 'curie')! +print(resp) + +fine_tunes := ai_cli.list_fine_tunes()! +print(fine_tunes) + +fine_tune := ai_cli.get_fine_tune(fine_tunes.data[0].id)! +print(fine_tune) + +moderations := ai_cli.create_moderation('Something violent', openai.ModerationModel.text_moderation_latest)! +print(moderations) + +embeddings := ai_cli.create_embeddings( + input: ['sample embedding input'] + model: openai.EmbeddingModel.text_embedding_ada +)! +print(embeddings) diff --git a/examples/develop/openai/openai_example.vsh b/examples/develop/openai/openai_example.vsh deleted file mode 100644 index 383509600..000000000 --- a/examples/develop/openai/openai_example.vsh +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run - -import freeflowuniverse.crystallib.clients.openai as op - -mut ai_cli := op.new()! -mut msg := []op.Message{} -msg << op.Message{ - role: op.RoleType.user - content: 'Say this is a test!' -} -mut msgs := op.Messages{ - messages: msg -} -res := ai_cli.chat_completion(op.ModelType.gpt_3_5_turbo, msgs)! -print(res) - -models := ai_cli.list_models()! - -model := ai_cli.get_model(models.data[0].id)! -print(model) -images_created := ai_cli.create_image(op.ImageCreateArgs{ - prompt: 'Calm weather' - num_images: 2 - size: op.ImageSize.size_512_512 - format: op.ImageRespType.url -})! -print(images_created) -images_updated := ai_cli.create_edit_image(op.ImageEditArgs{ - image_path: '/path/to/image.png' - mask_path: '/path/to/mask.png' - prompt: 'Calm weather' - num_images: 2 - size: op.ImageSize.size_512_512 - format: op.ImageRespType.url -})! -print(images_updated) -images_variatons := ai_cli.create_variation_image(op.ImageVariationArgs{ - image_path: '/path/to/image.png' - num_images: 2 - size: op.ImageSize.size_512_512 - format: op.ImageRespType.url -})! -print(images_variatons) - -transcription := ai_cli.create_transcription(op.AudioArgs{ - filepath: '/path/to/audio' -})! -print(transcription) - -translation := ai_cli.create_tranlation(op.AudioArgs{ - filepath: '/path/to/audio' -})! -print(translation) - -file_upload := ai_cli.upload_file(filepath: '/path/to/file.jsonl', purpose: 'fine-tune') -print(file_upload) -files := ai_cli.list_filess()! -print(files) -resp := ai_cli.create_fine_tune(training_file: file.id, model: 'curie')! -print(resp) - -fine_tunes := ai_cli.list_fine_tunes()! -print(fine_tunes) - -fine_tune := ai_cli.get_fine_tune(fine_tunes.data[0].id)! -print(fine_tune) - -moderations := ai_cli.create_moderation('Something violent', op.ModerationModel.text_moderation_latest)! -print(moderations) - -embeddings := ai_cli.create_embeddings( - input: ['sample embedding input'] - model: op.EmbeddingModel.text_embedding_ada -)! 
-print(embeddings) diff --git a/examples/virt/hetzner/hetzner_example.vsh b/examples/virt/hetzner/hetzner_example.vsh index 0d2064122..63e2b55f0 100755 --- a/examples/virt/hetzner/hetzner_example.vsh +++ b/examples/virt/hetzner/hetzner_example.vsh @@ -1,10 +1,7 @@ -#!/usr/bin/env -S v -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.crystallib.virt.hetzner import freeflowuniverse.crystallib.ui.console -import freeflowuniverse.crystallib.core.base -import freeflowuniverse.crystallib.builder -import time import os console.print_header('Hetzner login.') @@ -62,9 +59,11 @@ mut cl := hetzner.get(name: 'test')! // get the server in rescue mode, if its already in rescue then will not reboot, but just go there // hero_install will make sure we have hero in the rescue server -mut n := cl.server_rescue_node( - name: 'kristof2' +server := cl.server_rescue_node( + name: 'kristof4' wait: true sshkey_name: 'kristof@incubaid.com' hero_install: true )! + +println('Server rescue node created: ${server}')