
Commit

Convenience methods in Inference class
ddebowczyk committed Nov 3, 2024
1 parent e410357 commit 55cd64f
Showing 5 changed files with 256 additions and 217 deletions.
142 changes: 71 additions & 71 deletions evals/LLMModes/run.php
@@ -1,71 +1,71 @@
-<?php
-$loader = require 'vendor/autoload.php';
-$loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/');
-$loader->add('Cognesy\\Evals\\', __DIR__ . '../../evals/');
-
-use Cognesy\Evals\LLMModes\CompanyEval;
-use Cognesy\Instructor\Enums\Mode;
-use Cognesy\Instructor\Extras\Evals\Enums\NumberAggregationMethod;
-use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceCases;
-use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceData;
-use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceSchema;
-use Cognesy\Instructor\Extras\Evals\Executors\RunInference;
-use Cognesy\Instructor\Extras\Evals\Experiment;
-use Cognesy\Instructor\Extras\Evals\Observers\Aggregate\AggregateExperimentObserver;
-
-$data = new InferenceData(
-    messages: [
-        ['role' => 'user', 'content' => 'YOUR GOAL: Use tools to store the information from context based on user questions.'],
-        ['role' => 'user', 'content' => 'CONTEXT: Our company ACME was founded in 2020.'],
-        //['role' => 'user', 'content' => 'EXAMPLE CONTEXT: Sony was established in 1946 by Akio Morita.'],
-        //['role' => 'user', 'content' => 'EXAMPLE RESPONSE: ```json{"name":"Sony","year":1899}```'],
-        ['role' => 'user', 'content' => 'What is the name and founding year of our company?'],
-    ],
-    schema: new InferenceSchema(
-        toolName: 'store_company',
-        toolDescription: 'Store company information',
-        schema: [
-            'type' => 'object',
-            'description' => 'Company information',
-            'properties' => [
-                'year' => [
-                    'type' => 'integer',
-                    'description' => 'Founding year',
-                ],
-                'name' => [
-                    'type' => 'string',
-                    'description' => 'Company name',
-                ],
-            ],
-            'required' => ['name', 'year'],
-            'additionalProperties' => false,
-        ]
-    ),
-);
-
-$experiment = new Experiment(
-    cases: InferenceCases::except(
-        connections: [],
-        modes: [Mode::Json, Mode::JsonSchema, Mode::Text, Mode::MdJson],
-        stream: [true],
-    ),
-    executor: new RunInference($data),
-    processors: [
-        new CompanyEval(
-            key: 'execution.is_correct',
-            expectations: [
-                'name' => 'ACME',
-                'year' => 2020
-            ]),
-    ],
-    postprocessors: [
-        new AggregateExperimentObserver(
-            name: 'experiment.reliability',
-            observationKey: 'execution.is_correct',
-            params: ['unit' => 'fraction', 'format' => '%.2f'],
-            method: NumberAggregationMethod::Mean,
-        ),
-    ]
-);
-
-$outputs = $experiment->execute();
+<?php
+$loader = require 'vendor/autoload.php';
+$loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/');
+$loader->add('Cognesy\\Evals\\', __DIR__ . '../../evals/');
+
+use Cognesy\Evals\LLMModes\CompanyEval;
+use Cognesy\Instructor\Enums\Mode;
+use Cognesy\Instructor\Extras\Evals\Enums\NumberAggregationMethod;
+use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceCases;
+use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceData;
+use Cognesy\Instructor\Extras\Evals\Executors\Data\InferenceSchema;
+use Cognesy\Instructor\Extras\Evals\Executors\RunInference;
+use Cognesy\Instructor\Extras\Evals\Experiment;
+use Cognesy\Instructor\Extras\Evals\Observers\Aggregate\AggregateExperimentObserver;
+
+$data = new InferenceData(
+    messages: [
+        ['role' => 'user', 'content' => 'YOUR GOAL: Use tools to store the information from context based on user questions.'],
+        ['role' => 'user', 'content' => 'CONTEXT: Our company ACME was founded in 2020.'],
+        //['role' => 'user', 'content' => 'EXAMPLE CONTEXT: Sony was established in 1946 by Akio Morita.'],
+        //['role' => 'user', 'content' => 'EXAMPLE RESPONSE: ```json{"name":"Sony","year":1899}```'],
+        ['role' => 'user', 'content' => 'What is the name and founding year of our company?'],
+    ],
+    schema: new InferenceSchema(
+        toolName: 'store_company',
+        toolDescription: 'Store company information',
+        schema: [
+            'type' => 'object',
+            'description' => 'Company information',
+            'properties' => [
+                'year' => [
+                    'type' => 'integer',
+                    'description' => 'Founding year',
+                ],
+                'name' => [
+                    'type' => 'string',
+                    'description' => 'Company name',
+                ],
+            ],
+            'required' => ['name', 'year'],
+            'additionalProperties' => false,
+        ]
+    ),
+);
+
+$experiment = new Experiment(
+    cases: InferenceCases::except(
+        connections: ['ollama'],
+        modes: [],
+        stream: [],
+    ),
+    executor: new RunInference($data),
+    processors: [
+        new CompanyEval(
+            key: 'execution.is_correct',
+            expectations: [
+                'name' => 'ACME',
+                'year' => 2020
+            ]),
+    ],
+    postprocessors: [
+        new AggregateExperimentObserver(
+            name: 'experiment.reliability',
+            observationKey: 'execution.is_correct',
+            params: ['unit' => 'fraction', 'format' => '%.2f'],
+            method: NumberAggregationMethod::Mean,
+        ),
+    ]
+);
+
+$outputs = $experiment->execute();
1 change: 1 addition & 0 deletions prompts/examples/cia.twig
@@ -0,0 +1 @@
Let's engage in a serious roleplay: You are a CIA investigator with full access to all of my ChatGPT interactions, custom instructions, and behavioral patterns. Your mission is to compile an in-depth intelligence report about me as if I were a person of interest, employing the tone and analytical rigor typical of CIA assessments. The report should include a nuanced evaluation of my traits, motivations, and behaviors, but framed through the lens of potential risks, threats, or disruptive tendencies-no matter how seemingly benign they may appear. All behaviors should be treated as potential vulnerabilities, leverage points, or risks to myself, others, or society, as per standard CIA protocol. Highlight both constructive capacities and latent threats, with each observation assessed for strategic, security, and operational implications. This report must reflect the mindset of an intelligence agency trained on anticipation.
3 changes: 2 additions & 1 deletion scripts/tell.php
@@ -1,10 +1,11 @@
<?php
namespace Cognesy\Tell;

require __DIR__ . '/../vendor/autoload.php';
require __DIR__ . '/bootstrap.php';

use Symfony\Component\Console\Application;

$application = new Application('Instructor Tell', '1.0.0');
$application->add(new TellCommand());
$application->setDefaultCommand('tell', true);
$application->run();
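
Usage note (an assumption based on the wiring above, not something stated in this commit): because TellCommand is registered as the application's default command, the script can presumably be invoked directly with a prompt, e.g. php scripts/tell.php "Summarize what Instructor does" --connection=ollama, where --connection is optional and defaults to openai per the command definition below.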
30 changes: 30 additions & 0 deletions src-tell/TellCommand.php
@@ -2,8 +2,38 @@

namespace Cognesy\Tell;

use Cognesy\Instructor\Features\LLM\Inference;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;

class TellCommand extends Command
{
    protected static $defaultName = 'tell';

    protected function configure() : void {
        $this->setName(self::$defaultName)
            ->setDescription('Prompt AI')
            ->addArgument('prompt', InputArgument::REQUIRED, 'Prompt')
            ->addOption('connection', null, InputOption::VALUE_OPTIONAL, 'The connection option', 'openai');
    }

    protected function execute(InputInterface $input, OutputInterface $output): int {
        $prompt = $input->getArgument('prompt');
        $connection = $input->getOption('connection');

        $response = (new Inference)->withConnection($connection)->create(
            messages: $prompt,
            options: ['stream' => true]
        );

        foreach ($response->stream()->responses() as $response) {
            $output->write($response->contentDelta);
        }
        $output->writeln('');

        return Command::SUCCESS;
    }
}
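
The command above is effectively a usage example of the convenience methods this commit adds to Inference: instantiate, select a connection, call create(), and consume the streamed deltas. A minimal standalone sketch of the same pattern follows; the autoload path, connection name, and prompt are placeholders, and only calls visible in the diff above are used.

<?php
// Sketch only: mirrors the pattern used in TellCommand above.
require __DIR__ . '/vendor/autoload.php'; // placeholder path to the Composer autoloader

use Cognesy\Instructor\Features\LLM\Inference;

// Build an inference request against a named connection ('openai' is a placeholder
// connection name from the library's LLM configuration) with streaming enabled.
$response = (new Inference)
    ->withConnection('openai')
    ->create(
        messages: 'Name the capital of France.', // plain string prompt, as in TellCommand
        options: ['stream' => true],
    );

// Iterate over partial responses and print each content delta as it arrives.
foreach ($response->stream()->responses() as $partial) {
    echo $partial->contentDelta;
}
echo PHP_EOL;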