Dataset columns (viewer summary):
  repo_name   string, length 2-55
  dataset     string, 1 class
  owner       string, length 3-31
  lang        string, 10 classes
  func_name   string, length 1-104
  code        string, length 20-96.7k
  docstring   string, length 1-4.92k
  url         string, length 94-241
  sha         string, length 40
litestack
github_2023
oldmoe
ruby
Litejobqueue.delete
def delete(id)
  job = super(id)
  @logger.info("[litejob]:[DEL] job: #{job}")
  job = Oj.load(job[0], symbol_keys: true) if job
  job
end
# delete a job from the job queue
#   class EasyJob
#     def perform(any, number, of_params)
#       # do anything
#     end
#   end
#   jobqueue = Litejobqueue.new
#   id = jobqueue.push(EasyJob, params, 10) # queue for processing in 10 seconds
#   jobqueue.delete(id)
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litejobqueue.rb#L120-L125
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Litequeue.clear
def clear(queue = nil) run_sql("DELETE FROM queue WHERE iif(?1 IS NOT NULL, name = ?1, TRUE)", queue) end
# deletes all the entries in all queues, or if a queue name is given, deletes all entries in that specific queue
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litequeue.rb#L79-L81
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
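A minimal usage sketch for Litequeue#clear above, assuming a Litequeue instance created with default options; the queue name "urgent" is illustrative.
queue = Litequeue.new
queue.clear            # delete the entries in every queue
queue.clear("urgent")  # delete only the entries in the "urgent" queue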
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatabaseMethods.supports_create_table_if_not_exists?
def supports_create_table_if_not_exists? sqlite_version >= 30300 end
# SQLite supports CREATE TABLE IF NOT EXISTS syntax since 3.3.0.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L129-L131
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatabaseMethods.supports_deferrable_foreign_key_constraints?
def supports_deferrable_foreign_key_constraints? sqlite_version >= 30619 end
# SQLite 3.6.19+ supports deferrable foreign key constraints.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L134-L136
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatabaseMethods.views
def views(opts = OPTS) tables_and_views({type: "view"}, opts) end
# Array of symbols specifying the view names in the current database.
#
# Options:
# :server :: Set the server to use.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L180-L182
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatabaseMethods._foreign_key_list_ds
def _foreign_key_list_ds(table) metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table)) end
# Dataset used for parsing foreign key lists
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L187-L189
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatabaseMethods._parse_pragma_ds
def _parse_pragma_ds(table_name, opts) metadata_dataset.with_sql("PRAGMA table_#{"x" if sqlite_version > 33100}info(?)", input_identifier_meth(opts[:dataset]).call(table_name)) end
# Dataset used for parsing schema
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L192-L194
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.complex_expression_sql_append
def complex_expression_sql_append(sql, op, args)
  case op
  when :"NOT LIKE", :"NOT ILIKE"
    sql << "NOT "
    complex_expression_sql_append(sql, ((op == :"NOT ILIKE") ? :ILIKE : :LIKE), args)
  when :^
    complex_expression_arg_pairs_append(sql, args) { |a, b| Sequel.lit(["((~(", " & ", ")) & (", " | ", "))"], a, b, a, b) }
  when :**
    unless (exp = args[1]).is_a?(Integer)
      raise(Sequel::Error, "can only emulate exponentiation on SQLite if exponent is an integer, given #{exp.inspect}")
    end
    case exp
    when 0
      sql << "1"
    else
      sql << "("
      arg = args[0]
      if exp < 0
        invert = true
        exp = exp.abs
        sql << "(1.0 / ("
      end
      (exp - 1).times do
        literal_append(sql, arg)
        sql << " * "
      end
      literal_append(sql, arg)
      sql << ")"
      if invert
        sql << "))"
      end
    end
  when :extract
    part = args[0]
    raise(Sequel::Error, "unsupported extract argument: #{part.inspect}") unless (format = EXTRACT_MAP[part])
    sql << "CAST(strftime(" << format << ", "
    literal_append(sql, args[1])
    sql << ") AS " << ((part == :second) ? "NUMERIC" : "INTEGER") << ")"
  else
    super
  end
end
# SQLite doesn't support a NOT LIKE b, you need to use NOT (a LIKE b).
# It doesn't support xor, power, or the extract function natively, so those have to be emulated.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L600-L641
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
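A short sketch of Sequel dataset expressions that exercise the emulated operators above, assuming the standard Sequel expression API; DB, :items, :name, :created_at, and :quantity are hypothetical, and the ** line assumes a Sequel version that supports exponentiation on expressions.
DB[:items].where(Sequel.~(Sequel.like(:name, "a%"))).sql          # NOT LIKE rewritten as NOT (name LIKE 'a%')
DB[:items].select(Sequel.extract(:year, :created_at)).sql         # extract emulated via strftime + CAST
DB[:items].select((Sequel[:quantity] ** 2).as(:qty_squared)).sql  # integer exponent expanded to repeated multiplication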
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.explain
def explain(opts = nil)
  # Load the PrettyTable class, needed for explain output
  Sequel.extension(:_pretty_table) unless defined?(Sequel::PrettyTable)
  ds = db.send(:metadata_dataset).clone(sql: "EXPLAIN #{select_sql}")
  rows = ds.all
  Sequel::PrettyTable.string(rows, ds.columns)
end
# Return an array of strings specifying a query explanation for a SELECT of the
# current dataset. Currently, the options are ignored, but it accepts options
# to be compatible with other adapters.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L669-L676
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.having
def having(*cond)
  raise(InvalidOperation, "Can only specify a HAVING clause on a grouped dataset") if !@opts[:group] && db.sqlite_version < 33900
  super
end
# HAVING requires GROUP BY on SQLite
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L679-L682
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.insert_select
def insert_select(*values)
  return unless supports_insert_select?
  # Handle case where query does not return a row
  server?(:default).with_sql_first(insert_select_sql(*values)) || false
end
# Support insert select for associations, so that the model code can use
# returning instead of a separate query.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L686-L690
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.returning
def returning(*values)
  return super if values.empty?
  raise Error, "RETURNING is not supported on #{db.database_type}" unless supports_returning?(:insert)
  clone(returning: _returning_values(values).freeze)
end
# Automatically add aliases to RETURNING values to work around SQLite bug.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L781-L785
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.supports_cte?
def supports_cte?(type = :select) db.sqlite_version >= 30803 end
# SQLite 3.8.3+ supports common table expressions.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L788-L790
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.supports_deleting_joins?
def supports_deleting_joins? false end
# SQLite does not support deleting from a joined dataset
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L803-L805
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.supports_window_clause?
def supports_window_clause? db.sqlite_version >= 32800 end
# SQLite 3.28+ supports the WINDOW clause.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L845-L847
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.insert_on_conflict_sql
def insert_on_conflict_sql(sql)
  if (opts = @opts[:insert_on_conflict])
    sql << " ON CONFLICT"
    if (target = opts[:constraint])
      sql << " ON CONSTRAINT "
      identifier_append(sql, target)
    elsif (target = opts[:target])
      sql << " "
      identifier_append(sql, Array(target))
      if (conflict_where = opts[:conflict_where])
        sql << " WHERE "
        literal_append(sql, conflict_where)
      end
    end
    if (values = opts[:update])
      sql << " DO UPDATE SET "
      update_sql_values_hash(sql, values)
      if (update_where = opts[:update_where])
        sql << " WHERE "
        literal_append(sql, update_where)
      end
    else
      sql << " DO NOTHING"
    end
  end
end
# Add ON CONFLICT clause if it should be used
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L934-L961
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.literal_blob_append
def literal_blob_append(sql, v) sql << "X'" << v.unpack1("H*") << "'" end
# SQLite uses a preceding X for hex escaping strings
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L964-L966
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods.multi_insert_sql_strategy
def multi_insert_sql_strategy (db.sqlite_version >= 30711) ? :values : :union end
# SQLite only supports multiple rows in the VALUES clause
# starting in 3.7.11. On older versions, fall back to using a UNION.
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L980-L982
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
litestack
github_2023
oldmoe
ruby
Sequel.Litedb.DatasetMethods._truncate_sql
def _truncate_sql(table) "DELETE FROM #{table}" end
# SQLite treats a DELETE with no WHERE clause as a TRUNCATE
https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L1032-L1034
e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf
libtailscale
github_2023
tailscale
ruby
Tailscale.set_hostname
def set_hostname(hostname)
  assert_open
  Error.check self, Libtailscale::TsnetSetHostname(@t, hostname)
end
# Set the hostname to use for the tailscale node.
https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L196-L199
9d45e587f0837c9ea0ee55cba08dfa448dc8d480
libtailscale
github_2023
tailscale
ruby
Tailscale.set_auth_key
def set_auth_key(auth_key)
  assert_open
  Error.check self, Libtailscale::TsnetSetAuthKey(@t, auth_key)
end
# Set the auth key to use for the tailscale node.
https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L202-L205
9d45e587f0837c9ea0ee55cba08dfa448dc8d480
libtailscale
github_2023
tailscale
ruby
Tailscale.set_ephemeral
def set_ephemeral(ephemeral)
  assert_open
  Error.check self, Libtailscale::TsnetSetEphemeral(@t, ephemeral ? 1 : 0)
end
# Set whether the node is ephemeral or not.
https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L214-L217
9d45e587f0837c9ea0ee55cba08dfa448dc8d480
libtailscale
github_2023
tailscale
ruby
Tailscale.loopback
def loopback
  assert_open
  addrbuf = FFI::MemoryPointer.new(:char, 1024)
  proxycredbuf = FFI::MemoryPointer.new(:char, 33)
  localcredbuf = FFI::MemoryPointer.new(:char, 33)
  Error.check self, Libtailscale::TsnetLoopback(@t, addrbuf, addrbuf.size, proxycredbuf, localcredbuf)
  [addrbuf.read_string, proxycredbuf.read_string, localcredbuf.read_string]
end
# Start a listener on a loopback address, and returns the address
# and credentials for using it as LocalAPI or a proxy.
https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L248-L255
9d45e587f0837c9ea0ee55cba08dfa448dc8d480
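A minimal sketch of consuming Tailscale#loopback above, assuming `ts` is an already started Tailscale node (setup omitted).
addr, proxy_cred, local_cred = ts.loopback
puts "LocalAPI/proxy listening on #{addr}"
# proxy_cred / local_cred authenticate proxy and LocalAPI access respectively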
homographic_spoofing
github_2023
basecamp
ruby
HomographicSpoofing::Detector::Rule::MixedDigits.read_digits
def read_digits File.read("#{__dir__}/data/digits.csv") end
# Built with script/development/generate_digits_characters.rb
https://github.com/basecamp/homographic_spoofing/blob/eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6/lib/homographic_spoofing/detector/rule/mixed_digits.rb#L27-L29
eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6
homographic_spoofing
github_2023
basecamp
ruby
HomographicSpoofing::Detector::Rule::Idn::ScriptSpecific.latin_spoof?
def latin_spoof? scripts != Set[LATN] && non_ascii_latin_letters.present? end
# Disallow non-ASCII Latin letters to mix with a non-Latin script. # Note that the non-ASCII Latin check should not be applied when the entire label is made of Latin.
https://github.com/basecamp/homographic_spoofing/blob/eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6/lib/homographic_spoofing/detector/rule/idn/script_specific.rb#L10-L12
eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6
junodb
github_2023
paypal
ruby
Juno.Client.ReactClient.create
def create(key, value, ttl: nil)
  juno_request = Juno::Client::JunoRequest.new(
    key: key,
    value: value,
    version: 0,
    type: Juno::Client::JunoRequest::Type::CREATE,
    time_to_live_s: ttl,
    creation_time: Time.now.to_i
  )
  process_single(juno_request)
end
# Function to create new key value pair
# @param key [String] key for the document (required)
# @param value [String] value for the document (required)
# @param ttl [Integer] Time to live for the document (optional, default: read from config file)
# @return [Boolean] True if operation submitted successfully, else false
# @see #process_single
# @see Juno::DefaultProperties::DEFAULT_LIFETIME_S
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Client/react_client.rb#L56-L64
9750751a5335111b7a536326df5925e78cad245d
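A minimal usage sketch for Juno::Client::ReactClient#create above, assuming an already configured `client` instance; the key, value, and ttl are illustrative.
ok = client.create("user:42", '{"name":"Ada"}', ttl: 3600)
puts "create submitted" if ok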
junodb
github_2023
paypal
ruby
Juno.IO.MetadataComponent.set_time_to_live
def set_time_to_live(ttl)
  ttl = ttl.to_i
  raise ArgumentError, 'TTL should be > 0' unless ttl.positive?

  @time_to_live = ttl
  ttl = [ttl].pack(OffsetWidth.UINT32)
  add_field(MetadataField.new(0x01, 0x01, ttl))
end
# @param ttl [Integer] - Record Time to live
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/MetadataComponent.rb#L102-L109
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.IO.MetadataComponent.set_originator_request_id
def set_originator_request_id(input_uuid_byte_string = nil)
  @originator_request_id = if input_uuid_byte_string.nil?
                             UUIDTools::UUID.random_create
                           else
                             UUIDTools::UUID.parse_raw(input_uuid_byte_string)
                           end
  add_field(MetadataField.new(0x08, 0x03, @originator_request_id.raw))
  @originator_request_id
end
# @param input_uuid_byte_string [String] (optional)
#   if not provided, creates a uuid itself
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/MetadataComponent.rb#L168-L176
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.IO.OperationMessage.size
def size
  total_size = protocol_header.num_bytes
  total_size += payload_component.num_bytes unless payload_component.nil?
  total_size += metadata_component.num_bytes unless metadata_component.nil?
  total_size
end
# Calculates size of message
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/OperationMessage.rb#L15-L20
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.IO.PayloadComponent.custom_num_bytes
def custom_num_bytes
  size = component_size.num_bytes + tag_id.num_bytes + namespace_length.num_bytes + key_length.num_bytes + payload_length.num_bytes + namespace.num_bytes + payload_key.num_bytes
  size += payload_type.num_bytes + payload_data.num_bytes if payload_length.positive?
  size
end
# to prevent stack overflow
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/PayloadComponent.rb#L103-L107
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.Net.ClientHandler.on_connection_completed
def on_connection_completed
  # puts "completed #{Time.now}"
end
# Method called when TCP connection established. If useSSL is true, it is called after a successful ssl handshake
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/client_handler.rb#L50-L52
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.Net.IOProcessor.juno_connect
def juno_connect(recycle = false)
  return if !recycle && !@channel.nil? && @channel.is_connected?

  new_channel = EventMachine.connect(Juno.juno_config.host, Juno.juno_config.port, ClientHandler, self)
  new_channel.pending_connect_timeout = Juno.juno_config.connection_lifetime

  EventMachine::Timer.new(Juno.juno_config.connection_timeout.to_f / 1000) do
    if new_channel.is_connected?
      @LOGGER.info(@PROG_NAME) { "conncected to #{Juno.juno_config.host}:#{Juno.juno_config.port} " }
      if recycle
        old_channel = @channel
        @channel = new_channel
        disconnect_channel(old_channel)
      else
        @channel = new_channel
      end
      initiate_bypass_ltm if use_ltm?
      set_recycle_timer
    else
      @recycle_timer&.cancel
      new_channel&.close_connection if !new_channel.nil? && new_channel.is_connected?
      @LOGGER.info(@PROG_NAME) do
        "Could not conncect to #{Juno.juno_config.host}:#{Juno.juno_config.port}\n Retrying in #{@reconnect_wait_time.to_f / 1000}ms "
      end
      EventMachine::Timer.new(@reconnect_wait_time.to_f / 1000) do
        @reconnect_wait_time *= 2
        @reconnect_wait_time = MAX_WAIT_TIME if @reconnect_wait_time > MAX_WAIT_TIME
        @reconnect_wait_time *= (1 + 0.3 * rand)
        juno_connect(recycle)
      end
    end
  end
end
# Method to handle connections creation, re-attempts on failure, initiates connection refresh and connection to Proxy
# @param recycle [Boolean] - True if connection refresh request (optional, default: false)
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/io_processor.rb#L107-L138
9750751a5335111b7a536326df5925e78cad245d
junodb
github_2023
paypal
ruby
Juno.Net.PingMessage.initialize
def initialize(app_name = nil, opaque = 0)
  @PROG_NAME = self.class.name
  @LOGGER = Juno::Logger.instance
  app_name = JUNO_INTERNAL_APPNAME if app_name.to_s.empty?

  meta_data_component = Juno::IO::MetadataComponent.new
  meta_data_component.set_request_uuid
  meta_data_component.set_source_info(app_name: app_name, ip: IPAddr.new(Juno::Utils.local_ips[0]), port: 0)

  protocol_header = Juno::IO::ProtocolHeader.new
  protocol_header.opcode = Juno::IO::ProtocolHeader::OpCodes::Nop
  protocol_header.opaque = opaque

  @operation_message = Juno::IO::OperationMessage.new
  @operation_message.metadata_component = meta_data_component
  @operation_message.protocol_header = protocol_header
end
# @param operation_message [Juno::IO::OperationMessage] (optional, default: Juno::Net::PingMessage::JUNO_INTERNAL_APPNAME)
# @param opaque [Integer] (optional, default: 0)
https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/ping_message.rb#L15-L31
9750751a5335111b7a536326df5925e78cad245d
AITreasureBox
github_2023
superiorlu
ruby
sync_today_stars
def sync_today_stars(info, latest_stars)
  today = Time.now.strftime('%Y-%m-%d')
  if info.nil? || !info.include?('_')
    [today, latest_stars, 0]
  else
    date, total_stars, change_stars = info.split('_')
    if date != today
      change_stars = 0
    end
    if latest_stars.to_i == 0
      [today, total_stars, change_stars]
    else
      change_stars = change_stars.to_i + (latest_stars.to_i - total_stars.to_i)
      [today, latest_stars, change_stars]
    end
  end
end
# cumulate stars changes
https://github.com/superiorlu/AITreasureBox/blob/4f7e1a1b62066557c5ac95c32818c2082873b9e8/lib/update_readme.rb#L143-L159
4f7e1a1b62066557c5ac95c32818c2082873b9e8
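A worked example for sync_today_stars above, assuming `info` is stored as "date_totalStars_changeStars" (as the split('_') implies) and that the call happens on 2023-05-01.
# change_stars = 5 + (110 - 100) = 15, so the result is ["2023-05-01", 110, 15]
sync_today_stars("2023-05-01_100_5", 110)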
tiktoken_ruby
github_2023
IAPark
ruby
list_encoding_names
def list_encoding_names SUPPORTED_ENCODINGS end
# Lists all the encodings that are supported
https://github.com/IAPark/tiktoken_ruby/blob/ab315180ce105f6e01ed278ff359dfb7f28c7196/lib/tiktoken_ruby.rb#L51-L53
ab315180ce105f6e01ed278ff359dfb7f28c7196
tiktoken_ruby
github_2023
IAPark
ruby
list_model_names
def list_model_names MODEL_TO_ENCODING_NAME.keys end
# Lists all the models that are supported
https://github.com/IAPark/tiktoken_ruby/blob/ab315180ce105f6e01ed278ff359dfb7f28c7196/lib/tiktoken_ruby.rb#L57-L59
ab315180ce105f6e01ed278ff359dfb7f28c7196
standard-rails
github_2023
standardrb
ruby
Standard.Rails.Plugin.without_extended_rule_configs
def without_extended_rule_configs(rules) rules.reject { |(name, _)| ["Style/InvertibleUnlessCondition", "Lint/SafeNavigationChain"].include?(name) }.to_h end
# See: https://github.com/standardrb/standard-rails/issues/25#issuecomment-1881127173
https://github.com/standardrb/standard-rails/blob/0872d5cf69aba21b125f1a06ad951ce007b6de10/lib/standard/rails/plugin.rb#L55-L59
0872d5cf69aba21b125f1a06ad951ce007b6de10
standard-rails
github_2023
standardrb
ruby
Standard.Rails.Plugin.without_warnings
def without_warnings(&blk)
  original_verbose = $VERBOSE
  $VERBOSE = nil
  yield
ensure
  $VERBOSE = original_verbose
end
# This is also not fantastic, but because loading RuboCop before loading
https://github.com/standardrb/standard-rails/blob/0872d5cf69aba21b125f1a06ad951ce007b6de10/lib/standard/rails/plugin.rb#L85-L91
0872d5cf69aba21b125f1a06ad951ce007b6de10
universalid
github_2023
hopsoft
ruby
ActiveRecordETL.Pipeline.attributes
def attributes record.attributes end
# @return [Hash] the record's attributes
https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L40-L42
4294d1171b67510c85dc05e0cfd02353adc58614
universalid
github_2023
hopsoft
ruby
ActiveRecordETL.Pipeline.loaded_nested_attribute_names
def loaded_nested_attribute_names nested_attribute_names & loaded_has_many_associations_by_name.keys end
# Attribute names that the record `accepts_nested_attributes_for` that have been loaded into memory
https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L89-L91
4294d1171b67510c85dc05e0cfd02353adc58614
universalid
github_2023
hopsoft
ruby
ActiveRecordETL.Pipeline.parent_attribute_names
def parent_attribute_names
  record.class.reflections.each_with_object([]) do |(name, reflection), memo|
    memo << reflection.foreign_key if reflection.macro == :belongs_to
  end
end
# Attribute names for all the record's `belongs_to` associations
https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L96-L100
4294d1171b67510c85dc05e0cfd02353adc58614
universalid
github_2023
hopsoft
ruby
ActiveRecordETL.Pipeline.transform
def transform(format: :json, **options)
  case format
  # when :json then extract(**options).to_json
  when :json then Oj.dump extract(**options), symbol_keys: false
  else raise NotImplementedError
  end
end
# @raise [NotImplementedError] if the specified format is not supported
https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L145-L151
4294d1171b67510c85dc05e0cfd02353adc58614
oss-arch-gym
github_2023
srivatsankrishnan
ruby
BasicTest.MessageContainerTest.test_to_h
def test_to_h
  m = TestMessage.new(:optional_bool => true, :optional_double => -10.100001, :optional_string => 'foo', :repeated_string => ['bar1', 'bar2'], :repeated_msg => [TestMessage2.new(:foo => 100)])
  expected_result = {
    :optional_bool=>true,
    :optional_bytes=>"",
    :optional_double=>-10.100001,
    :optional_enum=>:Default,
    :optional_float=>0.0,
    :optional_int32=>0,
    :optional_int64=>0,
    :optional_msg=>nil,
    :optional_string=>"foo",
    :optional_uint32=>0,
    :optional_uint64=>0,
    :repeated_bool=>[],
    :repeated_bytes=>[],
    :repeated_double=>[],
    :repeated_enum=>[],
    :repeated_float=>[],
    :repeated_int32=>[],
    :repeated_int64=>[],
    :repeated_msg=>[{:foo => 100}],
    :repeated_string=>["bar1", "bar2"],
    :repeated_uint32=>[],
    :repeated_uint64=>[]
  }
  assert_equal expected_result, m.to_h

  m = MapMessage.new(
    :map_string_int32 => {"a" => 1, "b" => 2},
    :map_string_msg => {"a" => TestMessage2.new(:foo => 1), "b" => TestMessage2.new(:foo => 2)},
    :map_string_enum => {"a" => :A, "b" => :B})
  expected_result = {
    :map_string_int32 => {"a" => 1, "b" => 2},
    :map_string_msg => {"a" => {:foo => 1}, "b" => {:foo => 2}},
    :map_string_enum => {"a" => :A, "b" => :B}
  }
  assert_equal expected_result, m.to_h
end
#def test_json_quoted_string
https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/ruby/tests/basic.rb#L435-L474
fab6d1442541b5cdf40daf24e64e63261da2d846
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.initialize
def initialize(
  llm:,
  tools: [],
  instructions: nil,
  tool_choice: "auto",
  parallel_tool_calls: true,
  messages: [],
  # Callbacks
  add_message_callback: nil,
  tool_execution_callback: nil,
  &block
)
  unless tools.is_a?(Array) && tools.all? { |tool| tool.class.singleton_class.included_modules.include?(Langchain::ToolDefinition) }
    raise ArgumentError, "Tools must be an array of objects extending Langchain::ToolDefinition"
  end

  @llm = llm
  @llm_adapter = LLM::Adapter.build(llm)

  @add_message_callback = add_message_callback if validate_callback!("add_message_callback", add_message_callback)
  @tool_execution_callback = tool_execution_callback if validate_callback!("tool_execution_callback", tool_execution_callback)

  self.messages = messages
  @tools = tools
  @parallel_tool_calls = parallel_tool_calls
  self.tool_choice = tool_choice
  self.instructions = instructions
  @block = block
  @state = :ready

  @total_prompt_tokens = 0
  @total_completion_tokens = 0
  @total_tokens = 0
end
# @param tool_execution_callback [Proc] A callback function (Proc or lambda) that is called right before a tool function is executed
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L40-L73
0d9f46ee75cc1556c753d60bf574dc3956e23395
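A minimal sketch of constructing an assistant with the tool_execution_callback documented above, assuming an OpenAI LLM and the built-in calculator tool; the callback arity shown (tool_call_id, tool_name, method_name, tool_arguments) is an assumption about how the callback is invoked.
llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
assistant = Langchain::Assistant.new(
  llm: llm,
  tools: [Langchain::Tool::Calculator.new],
  tool_execution_callback: ->(tool_call_id, tool_name, method_name, tool_arguments) {
    puts "Running #{tool_name}##{method_name} with #{tool_arguments}"  # hypothetical logging
  }
)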
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.add_messages
def add_messages(messages:)
  messages.each do |message_hash|
    add_message(**message_hash.slice(:content, :role, :tool_calls, :tool_call_id))
  end
end
# @return [Array<Langchain::Message>] The messages
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L125-L129
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.run!
def run! run(auto_tool_execution: true) end
# @return [Array<Langchain::Message>] The messages
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L151-L153
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.replace_system_message!
def replace_system_message!(content:)
  messages.delete_if(&:system?)
  return if content.nil?

  message = build_message(role: "system", content: content)
  messages.unshift(message)
end
# @return [Array<Langchain::Message>] The messages
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L219-L225
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.validate_tool_choice!
def validate_tool_choice!(tool_choice)
  allowed_tool_choices = llm_adapter.allowed_tool_choices.concat(available_tool_names)
  unless allowed_tool_choices.include?(tool_choice)
    raise ArgumentError, "Tool choice must be one of: #{allowed_tool_choices.join(", ")}"
  end
end
# TODO: If tool_choice = "tool_function_name" and then tool is removed from the assistant, should we set tool_choice back to "auto"?
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L228-L233
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.handle_system_message
def handle_system_message
  Langchain.logger.warn("#{self.class} - At least one user message is required after a system message")
  :completed
end
# @return [Symbol] The completed state
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L279-L282
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.handle_unexpected_message
def handle_unexpected_message
  Langchain.logger.error("#{self.class} - Unexpected message role encountered: #{messages.last.standard_role}")
  :failed
end
# Handle unexpected message scenario
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L294-L297
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.handle_user_or_tool_message
def handle_user_or_tool_message
  response = chat_with_llm

  add_message(role: response.role, content: response.chat_completion, tool_calls: response.tool_calls)
  record_used_tokens(response.prompt_tokens, response.completion_tokens, response.total_tokens)

  set_state_for(response: response)
end
# Handle user or tool message scenario by processing the LLM response
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L302-L309
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.run_tools
def run_tools(tool_calls)
  # Iterate over each function invocation and submit tool output
  tool_calls.each do |tool_call|
    run_tool(tool_call)
  end
end
# @param tool_calls [Array<Hash>] The tool calls to run
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L354-L359
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::ToolDefinition.ParameterBuilder.build
def build(&block)
  instance_eval(&block)
  @schema
end
# @return [Hash] The built schema
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_definition.rb#L152-L155
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::ToolDefinition.ParameterBuilder.validate_parameters
def validate_parameters(name:, type:, enum:, required:)
  if @parent_type == "object"
    if name.nil?
      raise ArgumentError, "Name must be provided for properties of an object"
    end
    unless name.is_a?(Symbol)
      raise ArgumentError, "Invalid name '#{name}'. Name must be a symbol"
    end
  end

  unless VALID_TYPES.include?(type)
    raise ArgumentError, "Invalid type '#{type}'. Valid types are: #{VALID_TYPES.join(", ")}"
  end

  unless enum.nil? || enum.is_a?(Array)
    raise ArgumentError, "Invalid enum '#{enum}'. Enum must be nil or an array"
  end

  unless [true, false].include?(required)
    raise ArgumentError, "Invalid required '#{required}'. Required must be a boolean"
  end
end
# @raise [ArgumentError] If any parameter is invalid
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_definition.rb#L208-L229
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.ToolResponse.initialize
def initialize(content: nil, image_url: nil)
  raise ArgumentError, "Either content or image_url must be provided" if content.nil? && image_url.nil?

  @content = content
  @image_url = image_url
end
# @param image_url [String, nil] Optional URL to an image.
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_response.rb#L13-L18
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.Base.tool_role
def tool_role raise NotImplementedError, "Subclasses must implement tool_role" end
# Role name used to return the tool output
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/base.rb#L56-L58
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.MistralAI.build_message
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil) Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id) end
# @return [Messages::MistralAIMessage] The Mistral AI message
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L41-L43
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.MistralAI.extract_tool_call_args
def extract_tool_call_args(tool_call:)
  tool_call_id = tool_call.dig("id")

  function_name = tool_call.dig("function", "name")
  tool_name, method_name = function_name.split("__")

  tool_arguments = tool_call.dig("function", "arguments")
  tool_arguments = if tool_arguments.is_a?(Hash)
    Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
  else
    JSON.parse(tool_arguments, symbolize_names: true)
  end

  [tool_call_id, tool_name, method_name, tool_arguments]
end
# Extract the tool call information from the OpenAI tool call hash
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L49-L63
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.MistralAI.available_tool_names
def available_tool_names(tools) build_tools(tools).map { |tool| tool.dig(:function, :name) } end
# Get the available tool names for Mistral AI
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L76-L78
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.Ollama.build_message
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil) Messages::OllamaMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id) end
# @return [Messages::OllamaMessage] The Ollama message
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/ollama.rb#L41-L43
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.LLM.Adapters.Ollama.available_tool_names
def available_tool_names(tools) build_tools(tools).map { |tool| tool.dig(:function, :name) } end
# Build the tools for the Ollama LLM
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/ollama.rb#L66-L68
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.Messages.AnthropicMessage.initialize
def initialize(
  role:,
  content: nil,
  image_url: nil,
  tool_calls: [],
  tool_call_id: nil
)
  raise ArgumentError, "Role must be one of #{ROLES.join(", ")}" unless ROLES.include?(role)
  raise ArgumentError, "Tool calls must be an array of hashes" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }

  @role = role
  # Some Tools return content as a JSON hence `.to_s`
  @content = content.to_s
  @image_url = image_url
  @tool_calls = tool_calls
  @tool_call_id = tool_call_id
end
# @param tool_call_id [String] The ID of the tool call
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/anthropic_message.rb#L21-L37
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.Messages.MistralAIMessage.assistant_hash
def assistant_hash
  {
    role: "assistant",
    content: content,
    tool_calls: tool_calls,
    prefix: false
  }
end
# @return [Hash] The message as an MistralAI API-compatible hash, with the role as "assistant"
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/mistral_ai_message.rb#L82-L89
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.Messages.MistralAIMessage.build_content_array
def build_content_array
  content_details = []

  if content && !content.empty?
    content_details << {
      type: "text",
      text: content
    }
  end

  if image_url
    content_details << {
      type: "image_url",
      image_url: image_url
    }
  end

  content_details
end
# @return [Array<Hash>] An array of content hashes, with keys :type and :text or :image_url.
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/mistral_ai_message.rb#L121-L139
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.Messages.OllamaMessage.system?
def system? role == "system" end
# @return [Boolean] true/false whether this message is system instructions
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/ollama_message.rb#L67-L69
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Assistant.Messages.OpenAIMessage.tool?
def tool? role == "tool" end
# @return [Boolean] true/false whether this message is a tool call
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/openai_message.rb#L81-L83
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Chunker.Markdown.chunks
def chunks
  splitter = Baran::MarkdownSplitter.new(
    chunk_size: chunk_size,
    chunk_overlap: chunk_overlap
  )

  splitter.chunks(text).map do |chunk|
    Langchain::Chunk.new(text: chunk[:text])
  end
end
# @return [Array<Langchain::Chunk>]
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/chunker/markdown.rb#L25-L34
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Chunker.RecursiveText.chunks
def chunks
  splitter = Baran::RecursiveCharacterTextSplitter.new(
    chunk_size: chunk_size,
    chunk_overlap: chunk_overlap,
    separators: separators
  )

  splitter.chunks(text).map do |chunk|
    Langchain::Chunk.new(text: chunk[:text])
  end
end
# @return [Array<Langchain::Chunk>]
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/chunker/recursive_text.rb#L26-L36
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Evals.Ragas.AnswerRelevance.initialize
def initialize(llm:, batch_size: 3)
  @llm = llm
  @batch_size = batch_size
end
# @param batch_size [Integer] Batch size, i.e., number of generated questions to compare to the original question
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/evals/ragas/answer_relevance.rb#L15-L18
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Evals.Ragas.Main.answer_relevance
def answer_relevance @answer_relevance ||= Langchain::Evals::Ragas::AnswerRelevance.new(llm: llm) end
# @return [Langchain::Evals::Ragas::AnswerRelevance] Class instance
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/evals/ragas/main.rb#L54-L56
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::LLM.Base.chat
def chat(...) raise NotImplementedError, "#{self.class.name} does not support chat" end
# @raise NotImplementedError if not supported by the LLM
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/base.rb#L48-L50
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::LLM.Base.chat_parameters
def chat_parameters(params = {}) @chat_parameters ||= Langchain::LLM::Parameters::Chat.new( parameters: params ) end
#
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/base.rb#L81-L85
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::LLM.Cohere.summarize
def summarize(text:)
  response = client.summarize(text: text)
  response.dig("summary")
end
# Generate a summary in English for a given text
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/cohere.rb#L116-L119
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::LLM.Ollama.initialize
def initialize(url: "http://localhost:11434", api_key: nil, default_options: {})
  depends_on "faraday"
  @url = url
  @api_key = api_key
  @defaults = DEFAULTS.merge(default_options)
  chat_parameters.update(
    model: {default: @defaults[:chat_model]},
    temperature: {default: @defaults[:temperature]},
    template: {},
    stream: {default: false},
    response_format: {default: @defaults[:response_format]},
    options: {default: @defaults[:options]}
  )
  chat_parameters.remap(response_format: :format)
end
# Initialize the Ollama client
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/ollama.rb#L40-L54
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::LLM.Ollama.complete
def complete(
  prompt:,
  model: defaults[:completion_model],
  images: nil,
  format: nil,
  system: nil,
  template: nil,
  context: nil,
  raw: nil,
  mirostat: nil,
  mirostat_eta: nil,
  mirostat_tau: nil,
  num_ctx: nil,
  num_gqa: nil,
  num_gpu: nil,
  num_thread: nil,
  repeat_last_n: nil,
  repeat_penalty: nil,
  temperature: defaults[:temperature],
  seed: nil,
  stop: nil,
  tfs_z: nil,
  num_predict: nil,
  top_k: nil,
  top_p: nil,
  stop_sequences: nil,
  &block
)
  if stop_sequences
    stop = stop_sequences
  end

  parameters = {
    prompt: prompt,
    model: model,
    images: images,
    format: format,
    system: system,
    template: template,
    context: context,
    stream: block_given?, # rubocop:disable Performance/BlockGivenWithExplicitBlock
    raw: raw
  }.compact

  llm_parameters = {
    mirostat: mirostat,
    mirostat_eta: mirostat_eta,
    mirostat_tau: mirostat_tau,
    num_ctx: num_ctx,
    num_gqa: num_gqa,
    num_gpu: num_gpu,
    num_thread: num_thread,
    repeat_last_n: repeat_last_n,
    repeat_penalty: repeat_penalty,
    temperature: temperature,
    seed: seed,
    stop: stop,
    tfs_z: tfs_z,
    num_predict: num_predict,
    top_k: top_k,
    top_p: top_p
  }

  parameters[:options] = llm_parameters.compact

  responses_stream = []

  client.post("api/generate", parameters) do |req|
    req.options.on_data = json_responses_chunk_handler do |parsed_chunk|
      responses_stream << parsed_chunk

      block&.call(OllamaResponse.new(parsed_chunk, model: parameters[:model]))
    end
  end

  generate_final_completion_response(responses_stream, parameters[:model])
end
#
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/ollama.rb#L81-L156
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.LLM.BaseResponse.chat_completion
def chat_completion raise NotImplementedError end
# Returns the chat completion text
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/response/base_response.rb#L35-L37
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::OutputParsers.OutputFixingParser.get_format_instructions
def get_format_instructions parser.get_format_instructions end
# calls get_format_instructions on the @parser
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/output_parsers/output_fixing_parser.rb#L35-L37
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::OutputParsers.StructuredOutputParser.initialize
def initialize(schema:) @schema = validate_schema!(schema) end
# @param schema [JSON::Schema] The json schema
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/output_parsers/structured_output_parser.rb#L13-L15
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Processors.Eml.clean_content
def clean_content(content)
  content
    .gsub(/\[cid:[^\]]+\]/, "") # Remove embedded image references
    .gsub(URI::DEFAULT_PARSER.make_regexp(%w[http https])) { |match| "<#{match}>" } # Format URLs
    .gsub(/\r\n?/, "\n") # Normalize line endings to Unix style
    .gsub(/[\u200B-\u200D\uFEFF]/, "") # Remove zero width spaces and similar characters
    .gsub(/<\/?[^>]+>/, "") # Remove any HTML tags that might have sneaked in
end
# Clean and format the extracted content
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/processors/eml.rb#L54-L61
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Processors.Markdown.parse
def parse(data) data.read end
# Parse the document and return the text
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/processors/markdown.rb#L12-L14
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Prompt.PromptTemplate.initialize
def initialize(template:, input_variables:, validate_template: true)
  @template = template
  @input_variables = input_variables
  @validate_template = validate_template

  validate(template: @template, input_variables: @input_variables) if @validate_template
end
#
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/prompt/prompt_template.rb#L45-L51
0d9f46ee75cc1556c753d60bf574dc3956e23395
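A minimal usage sketch for Langchain::Prompt::PromptTemplate above; the template text and variable names are illustrative.
prompt = Langchain::Prompt::PromptTemplate.new(
  template: "Tell me a {adjective} joke about {content}.",
  input_variables: ["adjective", "content"]
)
prompt.format(adjective: "funny", content: "chickens")  # => "Tell me a funny joke about chickens."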
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Tool.Database.describe_table
def describe_table(table)
  # TODO: There's probably a clear way to do all of this below
  primary_key_columns = []
  primary_key_column_count = db.schema(table).count { |column| column[1][:primary_key] == true }

  schema = "CREATE TABLE #{table}(\n"
  db.schema(table).each do |column|
    schema << "#{column[0]} #{column[1][:type]}"
    if column[1][:primary_key] == true
      schema << " PRIMARY KEY" if primary_key_column_count == 1
    else
      primary_key_columns << column[0]
    end
    schema << " COMMENT '#{column[1][:comment]}'" if column[1][:comment]
    schema << ",\n" unless column == db.schema(table).last && primary_key_column_count == 1
  end
  if primary_key_column_count > 1
    schema << "PRIMARY KEY (#{primary_key_columns.join(",")})"
  end
  db.foreign_key_list(table).each do |fk|
    schema << ",\n" if fk == db.foreign_key_list(table).first
    schema << "FOREIGN KEY (#{fk[:columns]&.first}) REFERENCES #{fk[:table]}(#{fk[:key]&.first})"
    schema << ",\n" unless fk == db.foreign_key_list(table).last
  end
  schema << ");\n"

  tool_response(content: schema)
end
# @return [Langchain::Tool::Response] The schema for the table
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool/database.rb#L107-L135
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Tool.Tavily.search
def search(
  query:,
  search_depth: "basic",
  include_images: false,
  include_answer: false,
  include_raw_content: false,
  max_results: 5,
  include_domains: [],
  exclude_domains: []
)
  uri = URI("https://api.tavily.com/search")
  request = Net::HTTP::Post.new(uri)
  request.content_type = "application/json"
  request.body = {
    api_key: @api_key,
    query: query,
    search_depth: search_depth,
    include_images: include_images,
    include_answer: include_answer,
    include_raw_content: include_raw_content,
    max_results: max_results,
    include_domains: include_domains,
    exclude_domains: exclude_domains
  }.to_json

  response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") do |http|
    http.request(request)
  end
  tool_response(content: response.body)
end
# @return [Langchain::Tool::Response] The search results in JSON format.
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool/tavily.rb#L45-L74
0d9f46ee75cc1556c753d60bf574dc3956e23395
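A minimal usage sketch for Langchain::Tool::Tavily#search above, assuming an API key in TAVILY_API_KEY and that the returned tool response exposes the raw JSON body as `content`; the query is illustrative.
tavily = Langchain::Tool::Tavily.new(api_key: ENV["TAVILY_API_KEY"])
result = tavily.search(query: "latest Ruby release", max_results: 3)
puts result.content  # raw JSON returned by the Tavily API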
langchainrb
github_2023
patterns-ai-core
ruby
Langchain.Utils.CosineSimilarity.calculate_similarity
def calculate_similarity
  return nil unless vector_a.is_a? Array
  return nil unless vector_b.is_a? Array
  return nil if vector_a.size != vector_b.size

  dot_product = 0
  vector_a.zip(vector_b).each do |v1i, v2i|
    dot_product += v1i * v2i
  end

  a = vector_a.map { |n| n**2 }.reduce(:+)
  b = vector_b.map { |n| n**2 }.reduce(:+)

  dot_product / (Math.sqrt(a) * Math.sqrt(b))
end
# @return [Float] The cosine similarity between the two vectors
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/utils/cosine_similarity.rb#L17-L31
0d9f46ee75cc1556c753d60bf574dc3956e23395
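A worked example for calculate_similarity above, assuming the class is constructed with the two vectors positionally (as the vector_a/vector_b readers suggest).
# dot product = 1*2 + 2*4 + 3*6 = 28; |a|^2 = 14, |b|^2 = 56; 28 / sqrt(14 * 56) = 1.0 (parallel vectors)
Langchain::Utils::CosineSimilarity.new([1, 2, 3], [2, 4, 6]).calculate_similarity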
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Base.update_texts
def update_texts(...) raise NotImplementedError, "#{self.class.name} does not support updating texts" end
# Method supported by Vectorsearch DB to update a list of texts to the index
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L123-L125
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Base.similarity_search_by_vector
def similarity_search_by_vector(...) raise NotImplementedError, "#{self.class.name} does not support similarity search by vector" end
# You must generate your own vector using the same LLM that generated the embeddings stored in the Vectorsearch DB.
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L150-L152
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Base.ask
def ask(...) raise NotImplementedError, "#{self.class.name} does not support asking questions" end
# Method supported by Vectorsearch DB to answer a question given a context (data) pulled from your Vectorsearch DB.
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L155-L157
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Chroma.similarity_search
def similarity_search(
  query:,
  k: 4
)
  embedding = llm.embed(text: query).embedding

  similarity_search_by_vector(
    embedding: embedding,
    k: k
  )
end
# @return [Chroma::Resources::Embedding] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/chroma.rb#L94-L104
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Chroma.ask
def ask(question:, k: 4, &block)
  search_results = similarity_search(query: question, k: k)

  context = search_results.map do |result|
    result.document
  end
  context = context.join("\n---\n")

  prompt = generate_rag_prompt(question: question, context: context)

  messages = [{role: "user", content: prompt}]
  response = llm.chat(messages: messages, &block)

  response.context = context
  response
end
# @return [String] The answer to the question
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/chroma.rb#L128-L144
0d9f46ee75cc1556c753d60bf574dc3956e23395
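A minimal usage sketch for Langchain::Vectorsearch::Chroma#ask above, assuming a client configured with an LLM and an existing index; the URL, index name, and question are illustrative.
chroma = Langchain::Vectorsearch::Chroma.new(url: ENV["CHROMA_URL"], index_name: "documents", llm: llm)
response = chroma.ask(question: "What did the report conclude?")
puts response.chat_completion  # the LLM answer, grounded in the retrieved context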
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Elasticsearch.delete_default_schema
def delete_default_schema es_client.indices.delete( index: index_name ) end
# @return [Elasticsearch::Response] Index deletion
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/elasticsearch.rb#L100-L104
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Elasticsearch.similarity_search_by_vector
def similarity_search_by_vector(embedding: [], k: 10, query: {})
  if embedding.empty? && query.empty?
    raise "Either embedding or query should pass as an argument"
  end

  query = default_query(embedding) if query.empty?

  es_client.search(body: {query: query, size: k}).body
end
# @return [Elasticsearch::Response] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/elasticsearch.rb#L186-L194
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Epsilla.add_texts
def add_texts(texts:, ids: nil)
  validated_ids = ids
  if ids.nil?
    validated_ids = texts.map { SecureRandom.uuid }
  elsif ids.length != texts.length
    raise "The number of ids must match the number of texts"
  end

  data = texts.map.with_index do |text, idx|
    {Doc: text, Embedding: llm.embed(text: text).embedding, ID: validated_ids[idx]}
  end

  status_code, response = @client.database.insert(@table_name, data)
  raise "Failed to insert texts: #{response}" if status_code != 200
  JSON.parse(response)
end
# @param ids [Array<String>] The unique ids to add to the index, in the same order as the texts; if nil, it will be random uuids
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/epsilla.rb#L83-L98
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Milvus.get_default_schema
def get_default_schema client.collections.describe(collection_name: index_name) end
# @return [Hash] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/milvus.rb#L102-L104
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Pgvector.remove_texts
def remove_texts(ids:) @db[table_name.to_sym].where(id: ids).delete end
# @return [Integer] The number of texts removed from the index
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/pgvector.rb#L96-L98
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Qdrant.find
def find(ids: []) client.points.get_all( collection_name: index_name, ids: ids, with_payload: true, with_vector: true ) end
# @return [Hash] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L36-L43
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Qdrant.remove_texts
def remove_texts(ids:) client.points.delete( collection_name: index_name, points: ids ) end
# @return [Hash] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L71-L76
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Qdrant.get_default_schema
def get_default_schema client.collections.get(collection_name: index_name) end
# @return [Hash] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L80-L82
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Qdrant.similarity_search_by_vector
def similarity_search_by_vector(
  embedding:,
  k: 4
)
  response = client.points.search(
    collection_name: index_name,
    limit: k,
    vector: embedding,
    with_payload: true,
    with_vector: true
  )
  response.dig("result")
end
# @return [Hash] The response from the server
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L122-L134
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Weaviate.similarity_search_by_vector
def similarity_search_by_vector(embedding:, k: 4)
  near_vector = "{ vector: #{embedding} }"

  client.query.get(
    class_name: index_name,
    near_vector: near_vector,
    limit: k.to_s,
    fields: "__id content _additional { id }"
  )
end
# Return documents similar to the vector
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/weaviate.rb#L131-L140
0d9f46ee75cc1556c753d60bf574dc3956e23395
langchainrb
github_2023
patterns-ai-core
ruby
Langchain::Vectorsearch.Weaviate.ask
def ask(question:, k: 4, &block)
  search_results = similarity_search(query: question, k: k)

  context = search_results.map do |result|
    result.dig("content").to_s
  end
  context = context.join("\n---\n")

  prompt = generate_rag_prompt(question: question, context: context)

  messages = [{role: "user", content: prompt}]
  response = llm.chat(messages: messages, &block)

  response.context = context
  response
end
# Ask a question and return the answer
https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/weaviate.rb#L147-L162
0d9f46ee75cc1556c753d60bf574dc3956e23395