Commit

Merge branch 'develop' into dad-joke
pann0x authored Jan 11, 2025
2 parents 0dd9d8d + 03c2fa2 commit 89ba69f
Showing 171 changed files with 14,687 additions and 1,051 deletions.
125 changes: 99 additions & 26 deletions .env.example
@@ -100,32 +100,32 @@ MEDIUM_HYPERBOLIC_MODEL= # Default: meta-llama/Meta-Llama-3.1-70B-Instruct
LARGE_HYPERBOLIC_MODEL= # Default: meta-llama/Meta-Llama-3.1-405-Instruct

# Infera Configuration
INFERA_API_KEY= # visit api.infera.org/docs to obtain an API key under /signup_user
INFERA_MODEL= # Default: llama3.2:latest
INFERA_SERVER_URL= # Default: https://api.infera.org/
SMALL_INFERA_MODEL= # Recommended: llama3.2:latest
MEDIUM_INFERA_MODEL= # Recommended: mistral-nemo:latest
LARGE_INFERA_MODEL= # Recommended: mistral-small:latest

# Venice Configuration
VENICE_API_KEY= # generate from venice settings
SMALL_VENICE_MODEL= # Default: llama-3.3-70b
MEDIUM_VENICE_MODEL= # Default: llama-3.3-70b
LARGE_VENICE_MODEL= # Default: llama-3.1-405b
IMAGE_VENICE_MODEL= # Default: fluently-xl

# Nineteen.ai Configuration
NINETEEN_AI_API_KEY= # Get a free api key from https://nineteen.ai/app/api
SMALL_NINETEEN_AI_MODEL= # Default: unsloth/Llama-3.2-3B-Instruct
MEDIUM_NINETEEN_AI_MODEL= # Default: unsloth/Meta-Llama-3.1-8B-Instruct
LARGE_NINETEEN_AI_MODEL= # Default: hugging-quants/Meta-Llama-3.1-70B-Instruct-AWQ-INT4
IMAGE_NINETEEN_AI_MODE= # Default: dataautogpt3/ProteusV0.4-Lightning

# Akash Chat API Configuration docs: https://chatapi.akash.network/documentation
AKASH_CHAT_API_KEY= # Get from https://chatapi.akash.network/
SMALL_AKASH_CHAT_API_MODEL= # Default: Meta-Llama-3-2-3B-Instruct
MEDIUM_AKASH_CHAT_API_MODEL= # Default: Meta-Llama-3-3-70B-Instruct
LARGE_AKASH_CHAT_API_MODEL= # Default: Meta-Llama-3-1-405B-Instruct-FP8

# Livepeer configuration
LIVEPEER_GATEWAY_URL= # Free inference gateways and docs: https://livepeer-eliza.com/
@@ -184,6 +184,12 @@ MEDIUM_GOOGLE_MODEL= # Default: gemini-1.5-flash-latest
LARGE_GOOGLE_MODEL= # Default: gemini-1.5-pro-latest
EMBEDDING_GOOGLE_MODEL= # Default: text-embedding-004

# Mistral Configuration
MISTRAL_MODEL=
SMALL_MISTRAL_MODEL= # Default: mistral-small-latest
MEDIUM_MISTRAL_MODEL= # Default: mistral-large-latest
LARGE_MISTRAL_MODEL= # Default: mistral-large-latest

# Groq Configuration
GROQ_API_KEY= # Starts with gsk_
SMALL_GROQ_MODEL= # Default: llama-3.1-8b-instant
@@ -232,6 +238,13 @@ MEDIUM_VOLENGINE_MODEL= # Default: doubao-pro-128k
LARGE_VOLENGINE_MODEL= # Default: doubao-pro-256k
VOLENGINE_EMBEDDING_MODEL= # Default: doubao-embedding

# DeepSeek Configuration
DEEPSEEK_API_URL= # Default: https://api.deepseek.com
SMALL_DEEPSEEK_MODEL= # Default: deepseek-chat
MEDIUM_DEEPSEEK_MODEL= # Default: deepseek-chat
LARGE_DEEPSEEK_MODEL= # Default: deepseek-chat


# fal.ai Configuration
FAL_API_KEY=
FAL_AI_LORA_PATH=
@@ -306,6 +319,10 @@ STARKNET_ADDRESS=
STARKNET_PRIVATE_KEY=
STARKNET_RPC_URL=

# Lens Network Configuration
LENS_ADDRESS=
LENS_PRIVATE_KEY=

# Coinbase
COINBASE_COMMERCE_KEY= # From Coinbase developer portal
COINBASE_API_KEY= # From Coinbase developer portal
@@ -359,6 +376,12 @@ FLOW_ENDPOINT_URL= # Default: https://mainnet.onflow.org
INTERNET_COMPUTER_PRIVATE_KEY=
INTERNET_COMPUTER_ADDRESS=


# Cloudflare AI Gateway
CLOUDFLARE_GW_ENABLED= # Set to true to enable Cloudflare AI Gateway
CLOUDFLARE_AI_ACCOUNT_ID= # Cloudflare AI Account ID - found in the Cloudflare Dashboard under AI Gateway
CLOUDFLARE_AI_GATEWAY_ID= # Cloudflare AI Gateway ID - found in the Cloudflare Dashboard under AI Gateway

# Aptos
APTOS_PRIVATE_KEY= # Aptos private key
APTOS_NETWORK= # Must be one of mainnet, testnet
@@ -477,6 +500,56 @@ TAVILY_API_KEY=
VERIFIABLE_INFERENCE_ENABLED=false # Set to false to disable verifiable inference
VERIFIABLE_INFERENCE_PROVIDER=opacity # Options: opacity


# Autonome Configuration
AUTONOME_JWT_TOKEN=
AUTONOME_RPC=https://wizard-bff-rpc.alt.technology/v1/bff/aaa/apps

####################################
#### Akash Network Configuration ####
####################################
AKASH_ENV=mainnet
AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet
RPC_ENDPOINT=https://rpc.akashnet.net:443
AKASH_GAS_PRICES=0.025uakt
AKASH_GAS_ADJUSTMENT=1.5
AKASH_KEYRING_BACKEND=os
AKASH_FROM=default
AKASH_FEES=20000uakt
AKASH_DEPOSIT=500000uakt
AKASH_MNEMONIC=
AKASH_WALLET_ADDRESS=
# Akash Pricing API
AKASH_PRICING_API_URL=https://console-api.akash.network/v1/pricing
# Default values: 1 CPU = 1000, 1 GB memory = 1000000000, 1 GB storage = 1000000000
AKASH_DEFAULT_CPU=1000
AKASH_DEFAULT_MEMORY=1000000000
AKASH_DEFAULT_STORAGE=1000000000
AKASH_SDL=example.sdl.yml
# Close deployment
# To close all deployments, set AKASH_CLOSE_DEP=closeAll
# To close a single deployment, set AKASH_CLOSE_DEP=dseq and put the deployment sequence in AKASH_CLOSE_DSEQ
AKASH_CLOSE_DEP=closeAll
AKASH_CLOSE_DSEQ=19729929
# Provider info: a default provider address is included for status checks; pass this value into the action
AKASH_PROVIDER_INFO=akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz
# Deployment Status
# AKASH_DEP_STATUS can be dseq or param_passed. When building, you will pass the dseq dynamically for testing;
# you can pass it via AKASH_DEP_DSEQ (19729929 is an example dseq used during build testing).
AKASH_DEP_STATUS=dseq
AKASH_DEP_DSEQ=19729929
# Gas Estimation Options: close, create, or update
# dseq is required when the operation is "close" (19729929 is an example dseq used during build testing)
AKASH_GAS_OPERATION=close
AKASH_GAS_DSEQ=19729929
# Manifest
# Values: "auto" | "manual" | "validate_only" Default: "auto"
AKASH_MANIFEST_MODE=auto
# Default: Will use the SDL directory
AKASH_MANIFEST_PATH=
# Values: "strict" | "lenient" | "none" - Default: "strict"
AKASH_MANIFEST_VALIDATION_LEVEL=strict

# Quai Network Ecosystem
QUAI_PRIVATE_KEY=
QUAI_RPC_URL=https://rpc.quai.network
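
The provider blocks above (for example DeepSeek and the Cloudflare AI Gateway) follow the same convention as the rest of .env.example: leave a value blank to fall back on the documented default. Below is a minimal sketch of how such settings might be read at runtime, assuming a Node process with the file loaded into process.env; the getEnv helper and the config object names are illustrative and not taken from the repository.

```typescript
// Sketch only: reads the DeepSeek and Cloudflare AI Gateway settings listed in
// .env.example, falling back to the defaults documented in the comments.
// The getEnv helper is illustrative and not part of the repository.

function getEnv(name: string, fallback: string): string {
    const value = process.env[name];
    return value && value.trim().length > 0 ? value : fallback;
}

const deepseekConfig = {
    apiUrl: getEnv("DEEPSEEK_API_URL", "https://api.deepseek.com"),
    smallModel: getEnv("SMALL_DEEPSEEK_MODEL", "deepseek-chat"),
    mediumModel: getEnv("MEDIUM_DEEPSEEK_MODEL", "deepseek-chat"),
    largeModel: getEnv("LARGE_DEEPSEEK_MODEL", "deepseek-chat"),
};

const cloudflareGateway = {
    enabled: getEnv("CLOUDFLARE_GW_ENABLED", "false") === "true",
    accountId: process.env.CLOUDFLARE_AI_ACCOUNT_ID, // only needed when enabled
    gatewayId: process.env.CLOUDFLARE_AI_GATEWAY_ID, // only needed when enabled
};

console.log({ deepseekConfig, cloudflareGateway });
```

An empty string is treated the same as an unset variable, matching how the example file ships every value blank.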
15 changes: 6 additions & 9 deletions .github/workflows/integrationTests.yaml
@@ -3,7 +3,7 @@ on:
push:
branches:
- "*"
pull_request_target:
pull_request:
branches:
- "*"

@@ -33,12 +33,9 @@ jobs:
- name: Build packages
run: pnpm build

- name: Check for API key
run: |
if [ -z "$OPENAI_API_KEY" ]; then
echo "Error: OPENAI_API_KEY is not set."
exit 1
fi
- name: Run integration tests
run: pnpm run integrationTests
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
COINBASE_COMMERCE_KEY: ${{ secrets.COINBASE_COMMERCE_KEY }}
run: |
pnpm run integrationTests
2 changes: 2 additions & 0 deletions README.md
@@ -80,6 +80,8 @@ git clone https://github.com/elizaos/eliza.git
# Checkout the latest release
# This project iterates fast, so we recommend checking out the latest release
git checkout $(git describe --tags --abbrev=0)
# If the above doesn't checkout the latest release, this should work:
# git checkout $(git describe --tags `git rev-list --tags --max-count=1`)
```

### Start Eliza with Gitpod
12 changes: 9 additions & 3 deletions agent/package.json
@@ -18,6 +18,7 @@
"exec": "node --enable-source-maps --loader ts-node/esm src/index.ts"
},
"dependencies": {
"@elizaos/adapter-supabase": "workspace:*",
"@elizaos/adapter-postgres": "workspace:*",
"@elizaos/adapter-redis": "workspace:*",
"@elizaos/adapter-sqlite": "workspace:*",
@@ -34,7 +35,6 @@
"@elizaos/plugin-0g": "workspace:*",
"@elizaos/plugin-abstract": "workspace:*",
"@elizaos/plugin-aptos": "workspace:*",
"@elizaos/plugin-coingecko": "workspace:*",
"@elizaos/plugin-coinmarketcap": "workspace:*",
"@elizaos/plugin-coingecko": "workspace:*",
"@elizaos/plugin-binance": "workspace:*",
@@ -50,10 +50,12 @@
"@elizaos/plugin-gitbook": "workspace:*",
"@elizaos/plugin-story": "workspace:*",
"@elizaos/plugin-goat": "workspace:*",
"@elizaos/plugin-lensNetwork": "workspace:*",
"@elizaos/plugin-icp": "workspace:*",
"@elizaos/plugin-image-generation": "workspace:*",
"@elizaos/plugin-movement": "workspace:*",
"@elizaos/plugin-nft-generation": "workspace:*",
"@elizaos/plugin-massa": "workspace:*",
"@elizaos/plugin-nft-generation": "workspace:*",
"@elizaos/plugin-node": "workspace:*",
"@elizaos/plugin-solana": "workspace:*",
"@elizaos/plugin-solana-agentkit": "workspace:*",
@@ -71,6 +73,7 @@
"@elizaos/plugin-near": "workspace:*",
"@elizaos/plugin-zksync-era": "workspace:*",
"@elizaos/plugin-twitter": "workspace:*",
"@elizaos/plugin-primus": "workspace:*",
"@elizaos/plugin-cronoszkevm": "workspace:*",
"@elizaos/plugin-3d-generation": "workspace:*",
"@elizaos/plugin-fuel": "workspace:*",
@@ -86,6 +89,9 @@
"@elizaos/plugin-arthera": "workspace:*",
"@elizaos/plugin-allora": "workspace:*",
"@elizaos/plugin-opacity": "workspace:*",
"@elizaos/plugin-hyperliquid": "workspace:*",
"@elizaos/plugin-akash": "workspace:*",
"@elizaos/plugin-quai": "workspace:*",
"readline": "1.3.0",
"ws": "8.18.0",
"yargs": "17.7.2"
@@ -97,4 +103,4 @@
"ts-node": "10.9.2",
"tsup": "8.3.5"
}
}
}
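
Adding the new workspace dependencies above (for example @elizaos/plugin-akash, @elizaos/plugin-quai, @elizaos/plugin-hyperliquid, and @elizaos/adapter-supabase) only makes the packages resolvable; they still have to be wired into the agent. Below is a hedged sketch of that wiring under the common elizaOS pattern of enabling a plugin only when its environment variables are set; the export names akashPlugin and quaiPlugin, and the Plugin type import, are assumptions rather than code from this commit.

```typescript
// Sketch only: conditionally registering two of the newly added workspace
// plugins. Export names are assumed from the naming convention other elizaOS
// plugins follow and may differ in the actual packages.
import type { Plugin } from "@elizaos/core";
import { akashPlugin } from "@elizaos/plugin-akash";
import { quaiPlugin } from "@elizaos/plugin-quai";

// Enable each plugin only when its configuration is present in the environment.
const plugins: Plugin[] = [
    ...(process.env.AKASH_MNEMONIC ? [akashPlugin] : []),
    ...(process.env.QUAI_PRIVATE_KEY ? [quaiPlugin] : []),
];

export default plugins;
```

Gating on AKASH_MNEMONIC and QUAI_PRIVATE_KEY mirrors the variables introduced in the .env.example changes above.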