Compare commits


No commits in common. "dev" and "2025.6.0b2" have entirely different histories.

3839 changed files with 51595 additions and 218845 deletions


@@ -8,7 +8,6 @@
"PYTHONASYNCIODEBUG": "1"
},
"features": {
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
// Port 5683 udp is used by Shelly integration


@@ -1,14 +1,15 @@
name: Report an issue with Home Assistant Core
description: Report an issue with Home Assistant Core.
type: Bug
body:
- type: markdown
attributes:
value: |
This issue form is for reporting bugs only!
If you have a feature or enhancement request, please [request them here instead][fr].
If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
[fr]: https://github.com/orgs/home-assistant/discussions
[fr]: https://community.home-assistant.io/c/feature-requests
- type: textarea
validations:
required: true


@@ -10,8 +10,8 @@ contact_links:
url: https://www.home-assistant.io/help
about: We use GitHub for tracking bugs, check our website for resources on getting help.
- name: Feature Request
url: https://github.com/orgs/home-assistant/discussions
about: Please use this link to request new features or enhancements to existing features.
url: https://community.home-assistant.io/c/feature-requests
about: Please use our Community Forum for making feature requests.
- name: I'm unsure where to go
url: https://www.home-assistant.io/join-chat
about: If you are unsure where to go, then joining our chat is recommended; Just ask!


@@ -1,53 +0,0 @@
name: Task
description: For staff only - Create a task
type: Task
body:
- type: markdown
attributes:
value: |
## ⚠️ RESTRICTED ACCESS
**This form is restricted to Open Home Foundation staff, authorized contributors, and integration code owners only.**
If you are a community member wanting to contribute, please:
- For bug reports: Use the [bug report form](https://github.com/home-assistant/core/issues/new?template=bug_report.yml)
- For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)
---
### For authorized contributors
Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
- type: textarea
id: description
attributes:
label: Description
description: |
Provide a clear and detailed description of the task that needs to be accomplished.
Be specific about what needs to be done, why it's important, and any constraints or requirements.
placeholder: |
Describe the task, including:
- What needs to be done
- Why this task is needed
- Expected outcome
- Any constraints or requirements
validations:
required: true
- type: textarea
id: additional_context
attributes:
label: Additional context
description: |
Any additional information, links, research, or context that would be helpful.
Include links to related issues, research, prototypes, roadmap opportunities etc.
placeholder: |
- Roadmap opportunity: [link]
- Epic: [link]
- Feature request: [link]
- Technical design documents: [link]
- Prototype/mockup: [link]
- Dependencies: [links]
validations:
required: false

File diff suppressed because it is too large


@@ -6,6 +6,3 @@ updates:
interval: daily
time: "06:00"
open-pull-requests-limit: 10
labels:
- dependency
- github_actions


@@ -94,7 +94,7 @@ jobs:
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v11
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -105,10 +105,10 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v11
uses: dawidd6/action-download-artifact@v9
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: OHF-Voice/intents-package
repo: home-assistant/intents-package
branch: main
workflow: nightly.yaml
workflow_conclusion: success
@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
uses: sigstore/cosign-installer@v3.8.2
with:
cosign-release: "v2.2.3"
@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}


@@ -37,10 +37,10 @@ on:
type: boolean
env:
CACHE_VERSION: 4
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.9"
HA_SHORT_VERSION: "2025.6"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -360,7 +360,7 @@ jobs:
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github


@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.5
uses: github/codeql-action/init@v3.28.18
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.5
uses: github/codeql-action/analyze@v3.28.18
with:
category: "/language:python"


@@ -1,385 +0,0 @@
name: Auto-detect duplicate issues
# yamllint disable-line rule:truthy
on:
issues:
types: [labeled]
permissions:
issues: write
models: read
jobs:
detect-duplicates:
runs-on: ubuntu-latest
steps:
- name: Check if integration label was added and extract details
id: extract
uses: actions/github-script@v7.0.1
with:
script: |
// Debug: Log the event payload
console.log('Event name:', context.eventName);
console.log('Event action:', context.payload.action);
console.log('Event payload keys:', Object.keys(context.payload));
// Check the specific label that was added
const addedLabel = context.payload.label;
if (!addedLabel) {
console.log('No label found in labeled event payload');
core.setOutput('should_continue', 'false');
return;
}
console.log(`Label added: ${addedLabel.name}`);
if (!addedLabel.name.startsWith('integration:')) {
console.log('Added label is not an integration label, skipping duplicate detection');
core.setOutput('should_continue', 'false');
return;
}
console.log(`Integration label added: ${addedLabel.name}`);
let currentIssue;
let integrationLabels = [];
try {
const issue = await github.rest.issues.get({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.issue.number
});
currentIssue = issue.data;
// Check if potential-duplicate label already exists
const hasPotentialDuplicateLabel = currentIssue.labels
.some(label => label.name === 'potential-duplicate');
if (hasPotentialDuplicateLabel) {
console.log('Issue already has potential-duplicate label, skipping duplicate detection');
core.setOutput('should_continue', 'false');
return;
}
integrationLabels = currentIssue.labels
.filter(label => label.name.startsWith('integration:'))
.map(label => label.name);
} catch (error) {
core.error(`Failed to fetch issue #${context.payload.issue.number}:`, error.message);
core.setOutput('should_continue', 'false');
return;
}
// Check if we've already posted a duplicate detection comment recently
let comments;
try {
comments = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.issue.number,
per_page: 10
});
} catch (error) {
core.error('Failed to fetch comments:', error.message);
// Continue anyway, worst case we might post a duplicate comment
comments = { data: [] };
}
// Check if we've already posted a duplicate detection comment
const recentDuplicateComment = comments.data.find(comment =>
comment.user && comment.user.login === 'github-actions[bot]' &&
comment.body.includes('<!-- workflow: detect-duplicate-issues -->')
);
if (recentDuplicateComment) {
console.log('Already posted duplicate detection comment, skipping');
core.setOutput('should_continue', 'false');
return;
}
core.setOutput('should_continue', 'true');
core.setOutput('current_number', currentIssue.number);
core.setOutput('current_title', currentIssue.title);
core.setOutput('current_body', currentIssue.body);
core.setOutput('current_url', currentIssue.html_url);
core.setOutput('integration_labels', JSON.stringify(integrationLabels));
console.log(`Current issue: #${currentIssue.number}`);
console.log(`Integration labels: ${integrationLabels.join(', ')}`);
- name: Fetch similar issues
id: fetch_similar
if: steps.extract.outputs.should_continue == 'true'
uses: actions/github-script@v7.0.1
env:
INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
with:
script: |
const integrationLabels = JSON.parse(process.env.INTEGRATION_LABELS);
const currentNumber = parseInt(process.env.CURRENT_NUMBER);
if (integrationLabels.length === 0) {
console.log('No integration labels found, skipping duplicate detection');
core.setOutput('has_similar', 'false');
return;
}
// Use GitHub search API to find issues with matching integration labels
console.log(`Searching for issues with integration labels: ${integrationLabels.join(', ')}`);
// Build search query for issues with any of the current integration labels
const labelQueries = integrationLabels.map(label => `label:"${label}"`);
// Calculate date 6 months ago
const sixMonthsAgo = new Date();
sixMonthsAgo.setMonth(sixMonthsAgo.getMonth() - 6);
const dateFilter = `created:>=${sixMonthsAgo.toISOString().split('T')[0]}`;
let searchQuery;
if (labelQueries.length === 1) {
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]} ${dateFilter}`;
} else {
searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')}) ${dateFilter}`;
}
console.log(`Search query: ${searchQuery}`);
let result;
try {
result = await github.rest.search.issuesAndPullRequests({
q: searchQuery,
per_page: 15,
sort: 'updated',
order: 'desc'
});
} catch (error) {
core.error('Failed to search for similar issues:', error.message);
if (error.status === 403 && error.message.includes('rate limit')) {
core.error('GitHub API rate limit exceeded');
}
core.setOutput('has_similar', 'false');
return;
}
// Filter out the current issue, pull requests, and newer issues (higher numbers)
const similarIssues = result.data.items
.filter(item =>
item.number !== currentNumber &&
!item.pull_request &&
item.number < currentNumber // Only include older issues (lower numbers)
)
.map(item => ({
number: item.number,
title: item.title,
body: item.body,
url: item.html_url,
state: item.state,
createdAt: item.created_at,
updatedAt: item.updated_at,
comments: item.comments,
labels: item.labels.map(l => l.name)
}));
console.log(`Found ${similarIssues.length} issues with matching integration labels`);
console.log('Raw similar issues:', JSON.stringify(similarIssues.slice(0, 3), null, 2));
if (similarIssues.length === 0) {
console.log('No similar issues found, setting has_similar to false');
core.setOutput('has_similar', 'false');
return;
}
console.log('Similar issues found, setting has_similar to true');
core.setOutput('has_similar', 'true');
// Clean the issue data to prevent JSON parsing issues
const cleanedIssues = similarIssues.slice(0, 15).map(item => {
// Handle body with improved truncation and null handling
let cleanBody = '';
if (item.body && typeof item.body === 'string') {
// Remove control characters
const cleaned = item.body.replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
// Truncate to 1000 characters and add ellipsis if needed
cleanBody = cleaned.length > 1000
? cleaned.substring(0, 1000) + '...'
: cleaned;
}
return {
number: item.number,
title: item.title.replace(/[\u0000-\u001F\u007F-\u009F]/g, ''), // Remove control characters
body: cleanBody,
url: item.url,
state: item.state,
createdAt: item.createdAt,
updatedAt: item.updatedAt,
comments: item.comments,
labels: item.labels
};
});
console.log(`Cleaned issues count: ${cleanedIssues.length}`);
console.log('First cleaned issue:', JSON.stringify(cleanedIssues[0], null, 2));
core.setOutput('similar_issues', JSON.stringify(cleanedIssues));
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v1.2.3
with:
model: openai/gpt-4o
system-prompt: |
You are a Home Assistant issue duplicate detector. Your task is to identify TRUE DUPLICATES - issues that report the EXACT SAME problem, not just similar or related issues.
CRITICAL: An issue is ONLY a duplicate if:
- It describes the SAME problem with the SAME root cause
- Issues about the same integration but different problems are NOT duplicates
- Issues with similar symptoms but different causes are NOT duplicates
Important considerations:
- Open issues are more relevant than closed ones for duplicate detection
- Recently updated issues may indicate ongoing work or discussion
- Issues with more comments are generally more relevant and active
- Older closed issues might be resolved differently than newer approaches
- Consider the time between issues - very old issues may have different contexts
Rules:
1. ONLY mark as duplicate if the issues describe IDENTICAL problems
2. Look for issues that report the same problem or request the same functionality
3. Different error messages = NOT a duplicate (even if same integration)
4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
5. For OPEN issues, use a lower threshold (90%+ similarity)
6. Prioritize issues with higher comment counts as they indicate more activity/relevance
7. When in doubt, do NOT mark as duplicate
8. Return ONLY a JSON array of issue numbers that are duplicates
9. If no duplicates are found, return an empty array: []
10. Maximum 5 potential duplicates, prioritize open issues with comments
11. Consider the age of issues - prefer recent duplicates over very old ones
Example response format:
[1234, 5678, 9012]
prompt: |
Current issue (just created):
Title: ${{ steps.extract.outputs.current_title }}
Body: ${{ steps.extract.outputs.current_body }}
Other issues to compare against (each includes state, creation date, last update, and comment count):
${{ steps.fetch_similar.outputs.similar_issues }}
Analyze these issues and identify which ones describe IDENTICAL problems and thus are duplicates of the current issue. When sorting them, consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).
max-tokens: 100
- name: Post duplicate detection results
id: post_results
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/github-script@v7.0.1
env:
AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
with:
script: |
const aiResponse = process.env.AI_RESPONSE;
console.log('Raw AI response:', JSON.stringify(aiResponse));
let duplicateNumbers = [];
try {
// Clean the response of any potential control characters
const cleanResponse = aiResponse.trim().replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
console.log('Cleaned AI response:', cleanResponse);
duplicateNumbers = JSON.parse(cleanResponse);
// Ensure it's an array and contains only numbers
if (!Array.isArray(duplicateNumbers)) {
console.log('AI response is not an array, trying to extract numbers');
const numberMatches = cleanResponse.match(/\d+/g);
duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
}
// Filter to only valid numbers
duplicateNumbers = duplicateNumbers.filter(n => typeof n === 'number' && !isNaN(n));
} catch (error) {
console.log('Failed to parse AI response as JSON:', error.message);
console.log('Raw response:', aiResponse);
// Fallback: try to extract numbers from the response
const numberMatches = aiResponse.match(/\d+/g);
duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
console.log('Extracted numbers as fallback:', duplicateNumbers);
}
if (!Array.isArray(duplicateNumbers) || duplicateNumbers.length === 0) {
console.log('No duplicates detected by AI');
return;
}
console.log(`AI detected ${duplicateNumbers.length} potential duplicates: ${duplicateNumbers.join(', ')}`);
// Get details of detected duplicates
const similarIssues = JSON.parse(process.env.SIMILAR_ISSUES);
const duplicates = similarIssues.filter(issue => duplicateNumbers.includes(issue.number));
if (duplicates.length === 0) {
console.log('No matching issues found for detected numbers');
return;
}
// Create comment with duplicate detection results
const duplicateLinks = duplicates.map(issue => `- [#${issue.number}: ${issue.title}](${issue.url})`).join('\n');
const commentBody = [
'<!-- workflow: detect-duplicate-issues -->',
'### 🔍 **Potential duplicate detection**',
'',
'I\'ve analyzed similar issues and found the following potential duplicates:',
'',
duplicateLinks,
'',
'**What to do next:**',
'1. Please review these issues to see if they match your issue',
'2. If you find an existing issue that covers your problem:',
' - Consider closing this issue',
' - Add your findings or 👍 on the existing issue instead',
'3. If your issue is different or adds new aspects, please clarify how it differs',
'',
'This helps keep our issues organized and ensures similar issues are consolidated for better visibility.',
'',
'*This message was generated automatically by our duplicate detection system.*'
].join('\n');
try {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.issue.number,
body: commentBody
});
console.log(`Posted duplicate detection comment with ${duplicates.length} potential duplicates`);
// Add the potential-duplicate label
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.issue.number,
labels: ['potential-duplicate']
});
console.log('Added potential-duplicate label to the issue');
} catch (error) {
core.error('Failed to post duplicate detection comment or add label:', error.message);
if (error.status === 403) {
core.error('Permission denied or rate limit exceeded');
}
// Don't throw - we've done the analysis, just couldn't post the result
}


@@ -1,193 +0,0 @@
name: Auto-detect non-English issues
# yamllint disable-line rule:truthy
on:
issues:
types: [opened]
permissions:
issues: write
models: read
jobs:
detect-language:
runs-on: ubuntu-latest
steps:
- name: Check issue language
id: detect_language
uses: actions/github-script@v7.0.1
env:
ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }}
ISSUE_BODY: ${{ github.event.issue.body }}
ISSUE_USER_TYPE: ${{ github.event.issue.user.type }}
with:
script: |
// Get the issue details from environment variables
const issueNumber = process.env.ISSUE_NUMBER;
const issueTitle = process.env.ISSUE_TITLE || '';
const issueBody = process.env.ISSUE_BODY || '';
const userType = process.env.ISSUE_USER_TYPE;
// Skip language detection for bot users
if (userType === 'Bot') {
console.log('Skipping language detection for bot user');
core.setOutput('should_continue', 'false');
return;
}
console.log(`Checking language for issue #${issueNumber}`);
console.log(`Title: ${issueTitle}`);
// Combine title and body for language detection
const fullText = `${issueTitle}\n\n${issueBody}`;
// Check if the text is too short to reliably detect language
if (fullText.trim().length < 20) {
console.log('Text too short for reliable language detection');
core.setOutput('should_continue', 'false'); // Skip processing for very short text
return;
}
core.setOutput('issue_number', issueNumber);
core.setOutput('issue_text', fullText);
core.setOutput('should_continue', 'true');
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v1.2.3
with:
model: openai/gpt-4o-mini
system-prompt: |
You are a language detection system. Your task is to determine if the provided text is written in English or another language.
Rules:
1. Analyze the text and determine the primary language of the USER'S DESCRIPTION only
2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
4. IGNORE error messages, logs, and system output even if not in code blocks - these often appear in the user's system language
5. Consider technical terms, code snippets, URLs, and file paths as neutral (they don't indicate non-English)
6. Focus ONLY on the actual sentences and descriptions written by the user explaining their issue
7. If the user's explanation/description is in English but includes non-English error messages or logs, consider it ENGLISH
8. Return ONLY a JSON object with two fields:
- "is_english": boolean (true if the user's description is primarily in English, false otherwise)
- "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
9. Be lenient - if the user's explanation is in English with non-English system output, it's still English
10. Common programming terms, error messages, and technical jargon should not be considered as non-English
11. If you cannot reliably determine the language, set detected_language to "undefined"
Example response:
{"is_english": false, "detected_language": "Spanish"}
prompt: |
Please analyze the following issue text and determine if it is written in English:
${{ steps.detect_language.outputs.issue_text }}
max-tokens: 50
- name: Process non-English issues
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/github-script@v7.0.1
env:
AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
with:
script: |
const issueNumber = parseInt(process.env.ISSUE_NUMBER);
const aiResponse = process.env.AI_RESPONSE;
console.log('AI language detection response:', aiResponse);
let languageResult;
try {
languageResult = JSON.parse(aiResponse.trim());
// Validate the response structure
if (!languageResult || typeof languageResult.is_english !== 'boolean') {
throw new Error('Invalid response structure');
}
} catch (error) {
core.error(`Failed to parse AI response: ${error.message}`);
console.log('Raw AI response:', aiResponse);
// Log more details for debugging
core.warning('Defaulting to English due to parsing error');
// Default to English if we can't parse the response
return;
}
if (languageResult.is_english) {
console.log('Issue is in English, no action needed');
return;
}
// If language is undefined or not detected, skip processing
if (!languageResult.detected_language || languageResult.detected_language === 'undefined') {
console.log('Language could not be determined, skipping processing');
return;
}
console.log(`Issue detected as non-English: ${languageResult.detected_language}`);
// Post comment explaining the language requirement
const commentBody = [
'<!-- workflow: detect-non-english-issues -->',
'### 🌐 Non-English issue detected',
'',
`This issue appears to be written in **${languageResult.detected_language}** rather than English.`,
'',
'The Home Assistant project uses English as the primary language for issues to ensure that everyone in our international community can participate and help resolve issues. This allows any of our thousands of contributors to jump in and provide assistance.',
'',
'**What to do:**',
'1. Re-create the issue using the English language',
'2. If you need help with translation, consider using:',
' - Translation tools like Google Translate',
' - AI assistants like ChatGPT or Claude',
'',
'This helps our community provide the best possible support and ensures your issue gets the attention it deserves from our global contributor base.',
'',
'Thank you for your understanding! 🙏'
].join('\n');
try {
// Add comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body: commentBody
});
console.log('Posted language requirement comment');
// Add non-english label
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
labels: ['non-english']
});
console.log('Added non-english label');
// Close the issue
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
state: 'closed',
state_reason: 'not_planned'
});
console.log('Closed the issue');
} catch (error) {
core.error('Failed to process non-English issue:', error.message);
if (error.status === 403) {
core.error('Permission denied or rate limit exceeded');
}
}


@@ -1,84 +0,0 @@
name: Restrict task creation
# yamllint disable-line rule:truthy
on:
issues:
types: [opened]
jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.issue_type == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7
with:
script: |
const issueAuthor = context.payload.issue.user.login;
// First check if user is an organization member
try {
await github.rest.orgs.checkMembershipForUser({
org: 'home-assistant',
username: issueAuthor
});
console.log(`✅ ${issueAuthor} is an organization member`);
return; // Authorized, no need to check further
} catch (error) {
console.log(` ${issueAuthor} is not an organization member, checking codeowners...`);
}
// If not an org member, check if they're a codeowner
try {
// Fetch CODEOWNERS file from the repository
const { data: codeownersFile } = await github.rest.repos.getContent({
owner: context.repo.owner,
repo: context.repo.repo,
path: 'CODEOWNERS',
ref: 'dev'
});
// Decode the content (it's base64 encoded)
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf-8');
// Check if the issue author is mentioned in CODEOWNERS
// GitHub usernames in CODEOWNERS are prefixed with @
if (codeownersContent.includes(`@${issueAuthor}`)) {
console.log(`✅ ${issueAuthor} is an integration code owner`);
return; // Authorized
}
} catch (error) {
console.error('Error checking CODEOWNERS:', error);
}
// If we reach here, user is not authorized
console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
// Close the issue with a comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
`Task issues are restricted to Open Home Foundation staff, authorized contributors, and integration code owners.\n\n` +
`If you would like to:\n` +
`- Report a bug: Please use the [bug report form](https://github.com/home-assistant/core/issues/new?template=bug_report.yml)\n` +
`- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
`If you believe you should have access to create Task issues, please contact the maintainers.`
});
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
state: 'closed'
});
// Add a label to indicate this was auto-closed
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['auto-closed']
});


@@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

.gitignore

@@ -137,8 +137,4 @@ tmp_cache
.ropeproject
# Will be created from script/split_tests.py
pytest_buckets.txt
# AI tooling
.claude
pytest_buckets.txt


@@ -1,8 +1,8 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.1
rev: v0.11.0
hooks:
- id: ruff-check
- id: ruff
args:
- --fix
- id: ruff-format
@@ -30,7 +30,7 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.37.1
rev: v1.35.1
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier


@@ -53,7 +53,6 @@ homeassistant.components.air_quality.*
homeassistant.components.airgradient.*
homeassistant.components.airly.*
homeassistant.components.airnow.*
homeassistant.components.airos.*
homeassistant.components.airq.*
homeassistant.components.airthings.*
homeassistant.components.airthings_ble.*
@@ -66,9 +65,8 @@ homeassistant.components.aladdin_connect.*
homeassistant.components.alarm_control_panel.*
homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alexa_devices.*
homeassistant.components.alpha_vantage.*
homeassistant.components.altruist.*
homeassistant.components.amazon_devices.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
@@ -378,12 +376,10 @@ homeassistant.components.onedrive.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.open_router.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
homeassistant.components.openuv.*
homeassistant.components.opower.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
@@ -502,12 +498,10 @@ homeassistant.components.tag.*
homeassistant.components.tailscale.*
homeassistant.components.tailwind.*
homeassistant.components.tami4.*
homeassistant.components.tankerkoenig.*
homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.telegram_bot.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*
@@ -538,7 +532,6 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
@@ -548,7 +541,6 @@ homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.volvo.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*

.vscode/tasks.json

@@ -45,7 +45,7 @@
{
"label": "Ruff",
"type": "shell",
"command": "pre-commit run ruff-check --all-files",
"command": "pre-commit run ruff --all-files",
"group": {
"kind": "test",
"isDefault": true


@@ -1 +0,0 @@
.github/copilot-instructions.md

CODEOWNERS

@@ -57,8 +57,6 @@ build.json @home-assistant/supervisor
/tests/components/aemet/ @Noltari
/homeassistant/components/agent_dvr/ @ispysoftware
/tests/components/agent_dvr/ @ispysoftware
/homeassistant/components/ai_task/ @home-assistant/core
/tests/components/ai_task/ @home-assistant/core
/homeassistant/components/air_quality/ @home-assistant/core
/tests/components/air_quality/ @home-assistant/core
/homeassistant/components/airgradient/ @airgradienthq @joostlek
@@ -67,8 +65,6 @@ build.json @home-assistant/supervisor
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
/tests/components/airnow/ @asymworks
/homeassistant/components/airos/ @CoMPaTech
/tests/components/airos/ @CoMPaTech
/homeassistant/components/airq/ @Sibgatulin @dl2080
/tests/components/airq/ @Sibgatulin @dl2080
/homeassistant/components/airthings/ @danielhiversen @LaStrada
@@ -93,10 +89,8 @@ build.json @home-assistant/supervisor
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/alexa_devices/ @chemelli74
/tests/components/alexa_devices/ @chemelli74
/homeassistant/components/altruist/ @airalab @LoSk-p
/tests/components/altruist/ @airalab @LoSk-p
/homeassistant/components/amazon_devices/ @chemelli74
/tests/components/amazon_devices/ @chemelli74
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
@@ -333,8 +327,8 @@ build.json @home-assistant/supervisor
/tests/components/demo/ @home-assistant/core
/homeassistant/components/denonavr/ @ol-iver @starkillerOG
/tests/components/denonavr/ @ol-iver @starkillerOG
/homeassistant/components/derivative/ @afaucogney @karwosts
/tests/components/derivative/ @afaucogney @karwosts
/homeassistant/components/derivative/ @afaucogney
/tests/components/derivative/ @afaucogney
/homeassistant/components/devialet/ @fwestenberg
/tests/components/devialet/ @fwestenberg
/homeassistant/components/device_automation/ @home-assistant/core
@@ -454,8 +448,8 @@ build.json @home-assistant/supervisor
/tests/components/eq3btsmart/ @eulemitkeule @dbuezas
/homeassistant/components/escea/ @lazdavila
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/event/ @home-assistant/core
@@ -686,8 +680,8 @@ build.json @home-assistant/supervisor
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst @vincentwolsink
/tests/components/huum/ @frwickst @vincentwolsink
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
@@ -790,6 +784,8 @@ build.json @home-assistant/supervisor
/tests/components/jellyfin/ @RunC0deRun @ctalkington
/homeassistant/components/jewish_calendar/ @tsvi
/tests/components/jewish_calendar/ @tsvi
/homeassistant/components/juicenet/ @jesserockz
/tests/components/juicenet/ @jesserockz
/homeassistant/components/justnimbus/ @kvanzuijlen
/tests/components/justnimbus/ @kvanzuijlen
/homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
@@ -1104,8 +1100,6 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/openai_conversation/ @balloob
/tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq
@@ -1173,8 +1167,6 @@ build.json @home-assistant/supervisor
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
/tests/components/plaato/ @JohNan
/homeassistant/components/playstation_network/ @jackjpowell @tr4nt0r
/tests/components/playstation_network/ @jackjpowell @tr4nt0r
/homeassistant/components/plex/ @jjlawren
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
@@ -1282,8 +1274,8 @@ build.json @home-assistant/supervisor
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
/tests/components/remote_calendar/ @Thomas55555 @allenporter
/homeassistant/components/remote_calendar/ @Thomas55555
/tests/components/remote_calendar/ @Thomas55555
/homeassistant/components/renault/ @epenet
/tests/components/renault/ @epenet
/homeassistant/components/renson/ @jimmyd-be
@@ -1557,8 +1549,6 @@ build.json @home-assistant/supervisor
/tests/components/technove/ @Moustachauve
/homeassistant/components/tedee/ @patrickhilker @zweckj
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/telegram_bot/ @hanwg
/tests/components/telegram_bot/ @hanwg
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @Petro31 @home-assistant/core
@@ -1588,8 +1578,6 @@ build.json @home-assistant/supervisor
/tests/components/tile/ @bachya
/homeassistant/components/tilt_ble/ @apt-itude
/tests/components/tilt_ble/ @apt-itude
/homeassistant/components/tilt_pi/ @michaelheyman
/tests/components/tilt_pi/ @michaelheyman
/homeassistant/components/time/ @home-assistant/core
/tests/components/time/ @home-assistant/core
/homeassistant/components/time_date/ @fabaff
@@ -1662,8 +1650,6 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco
@@ -1680,8 +1666,6 @@ build.json @home-assistant/supervisor
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
/homeassistant/components/vegehub/ @ghowevege
/tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
@@ -1708,8 +1692,6 @@ build.json @home-assistant/supervisor
/tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvo/ @thomasddn
/tests/components/volvo/ @thomasddn
/homeassistant/components/volvooncall/ @molobrakos
/tests/components/volvooncall/ @molobrakos
/homeassistant/components/vulcan/ @Antoni-Czaplicki
@@ -1764,8 +1746,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k


@@ -1,7 +1,15 @@
FROM mcr.microsoft.com/vscode/devcontainers/base:debian
FROM mcr.microsoft.com/devcontainers/python:1-3.13
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN \
pipx uninstall pydocstyle \
&& pipx uninstall pycodestyle \
&& pipx uninstall mypy \
&& pipx uninstall pylint
RUN \
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
@@ -24,18 +32,21 @@ RUN \
libxml2 \
git \
cmake \
autoconf \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
# Install uv
RUN pip3 install uv
WORKDIR /usr/src
COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
RUN uv python install 3.13.2
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
&& uv pip install --system -e hass-release/ \
&& chown -R vscode /usr/src/hass-release/data
USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
@@ -44,10 +55,6 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /tmp
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
&& uv pip install -e ~/hass-release/
# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
@@ -58,4 +65,4 @@ RUN uv pip install -r requirements_test.txt
WORKDIR /workspaces
# Set the default shell to bash instead of sh
ENV SHELL=/bin/bash
ENV SHELL /bin/bash


@@ -38,7 +38,8 @@ def validate_python() -> None:
def ensure_config_path(config_dir: str) -> None:
"""Validate the configuration directory."""
from . import config as config_util # noqa: PLC0415
# pylint: disable-next=import-outside-toplevel
from . import config as config_util
lib_dir = os.path.join(config_dir, "deps")
@@ -79,7 +80,8 @@ def ensure_config_path(config_dir: str) -> None:
def get_arguments() -> argparse.Namespace:
"""Get parsed passed in arguments."""
from . import config as config_util # noqa: PLC0415
# pylint: disable-next=import-outside-toplevel
from . import config as config_util
parser = argparse.ArgumentParser(
description="Home Assistant: Observe, Control, Automate.",
@@ -175,7 +177,8 @@ def main() -> int:
validate_os()
if args.script is not None:
from . import scripts # noqa: PLC0415
# pylint: disable-next=import-outside-toplevel
from . import scripts
return scripts.run(args.script)
@@ -185,7 +188,8 @@ def main() -> int:
ensure_config_path(config_dir)
from . import config, runner # noqa: PLC0415
# pylint: disable-next=import-outside-toplevel
from . import config, runner
safe_mode = config.safe_mode_enabled(config_dir)


@@ -52,28 +52,28 @@ _LOGGER = logging.getLogger(__name__)
def _generate_secret() -> str:
"""Generate a secret."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
return str(pyotp.random_base32())
def _generate_random() -> int:
"""Generate a 32 digit number."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
return int(pyotp.random_base32(length=32, chars=list("1234567890")))
def _generate_otp(secret: str, count: int) -> str:
"""Generate one time password."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
return str(pyotp.HOTP(secret).at(count))
def _verify_otp(secret: str, otp: str, count: int) -> bool:
"""Verify one time password."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
return bool(pyotp.HOTP(secret).verify(otp, count))


@@ -37,7 +37,7 @@ DUMMY_SECRET = "FPPTH34D4E3MI2HG"
def _generate_qr_code(data: str) -> str:
"""Generate a base64 PNG string represent QR Code image of data."""
import pyqrcode # noqa: PLC0415
import pyqrcode # pylint: disable=import-outside-toplevel
qr_code = pyqrcode.create(data)
@@ -59,7 +59,7 @@ def _generate_qr_code(data: str) -> str:
def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
"""Generate a secret, url, and QR code."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
ota_secret = pyotp.random_base32()
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
@@ -107,7 +107,7 @@ class TotpAuthModule(MultiFactorAuthModule):
def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
"""Create a ota_secret for user."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
ota_secret: str = secret or pyotp.random_base32()
@@ -163,7 +163,7 @@ class TotpAuthModule(MultiFactorAuthModule):
def _validate_2fa(self, user_id: str, code: str) -> bool:
"""Validate two factor authentication code."""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
if (ota_secret := self._users.get(user_id)) is None: # type: ignore[union-attr]
# even we cannot find user, we still do verify
@@ -196,7 +196,7 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):
Return self.async_show_form(step_id='init') if user_input is None.
Return self.async_create_entry(data={'result': result}) if finish.
"""
import pyotp # noqa: PLC0415
import pyotp # pylint: disable=import-outside-toplevel
errors: dict[str, str] = {}


@@ -33,10 +33,7 @@ class AuthFlowContext(FlowContext, total=False):
redirect_uri: str
class AuthFlowResult(FlowResult[AuthFlowContext, tuple[str, str]], total=False):
"""Typed result dict for auth flow."""
result: Credentials # Only present if type is CREATE_ENTRY
AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]
@attr.s(slots=True)


@@ -0,0 +1,29 @@
"""Enum backports from standard lib.
This file contained the backport of the StrEnum of Python 3.11.
Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""
from __future__ import annotations
from enum import StrEnum as _StrEnum
from functools import partial
from homeassistant.helpers.deprecation import (
DeprecatedAlias,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())


@@ -0,0 +1,31 @@
"""Functools backports from standard lib.
This file contained the backport of the cached_property implementation of Python 3.12.
Since we have dropped support for Python 3.11, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""
from __future__ import annotations
# pylint: disable-next=hass-deprecated-import
from functools import cached_property as _cached_property, partial
from homeassistant.helpers.deprecation import (
DeprecatedAlias,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
_DEPRECATED_cached_property = DeprecatedAlias(
_cached_property, "functools.cached_property", "2025.5"
)
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())


@@ -75,8 +75,8 @@ from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
backup,
category_registry,
condition,
config_validation as cv,
device_registry,
entity,
@@ -89,7 +89,6 @@ from .helpers import (
restore_state,
template,
translation,
trigger,
)
from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
@@ -172,6 +171,8 @@ FRONTEND_INTEGRATIONS = {
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
# The substage containing recorder should have no timeout, as it could cancel a database migration.
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
# The substages preceding it should also have no timeout, until we ensure that the recorder
# is not accidentally promoted as a dependency of any of the integrations in them.
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
@@ -332,9 +333,6 @@ async def async_setup_hass(
if not is_virtual_env():
await async_mount_local_lib_path(runtime_config.config_dir)
if hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")
basic_setup_success = (
await async_from_config_dict(config_dict, hass) is not None
)
@@ -387,6 +385,8 @@ async def async_setup_hass(
{"recovery_mode": {}, "http": http_conf},
hass,
)
elif hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")
if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass)
@@ -396,7 +396,7 @@
def open_hass_ui(hass: core.HomeAssistant) -> None:
"""Open the UI."""
import webbrowser # noqa: PLC0415
import webbrowser # pylint: disable=import-outside-toplevel
if hass.config.api is None or "frontend" not in hass.config.components:
_LOGGER.warning("Cannot launch the UI because frontend not loaded")
@@ -454,8 +454,6 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
create_eager_task(restore_state.async_load(hass)),
create_eager_task(hass.config_entries.async_initialize()),
create_eager_task(async_get_system_info(hass)),
create_eager_task(condition.async_setup(hass)),
create_eager_task(trigger.async_setup(hass)),
)
@@ -565,7 +563,8 @@ async def async_enable_logging(
if not log_no_color:
try:
from colorlog import ColoredFormatter # noqa: PLC0415
# pylint: disable-next=import-outside-toplevel
from colorlog import ColoredFormatter
# basicConfig must be called after importing colorlog in order to
# ensure that the handlers it sets up wraps the correct streams.
@@ -695,10 +694,10 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# The common config section [homeassistant] could be filtered here,
# but that is not necessary, since it corresponds to the core integration,
# that is always unconditionally loaded.
domains = {cv.domain_key(key) for key in config}
# Filter out the repeating and common config section [homeassistant]
domains = {
domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
}
# Add config entry and default domains
if not hass.config.recovery_mode:
@@ -726,28 +725,34 @@ async def _async_resolve_domains_and_preload(
together with all their dependencies.
"""
domains_to_setup = _get_domains(hass, config)
# Also process all base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to `domains`,
# so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.
#
# Additionally process integrations that are defined under base platforms
# to speed things up.
# For example if we have
# sensor:
# - platform: template
#
# `template` has to be loaded to validate the config for sensor.
# The more platforms under `sensor:`, the longer
# `template` has to be loaded to validate the config for sensor
# so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config.
#
# Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future.
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
domains_to_setup.update(platform_integrations)
# Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
additional_domains_to_process = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
@@ -865,9 +870,9 @@ async def _async_set_up_integrations(
domains = set(integrations) & all_domains
_LOGGER.info(
"Domains to be set up: %s\nDependencies: %s",
domains or "{}",
(all_domains - domains) or "{}",
"Domains to be set up: %s | %s",
domains,
all_domains - domains,
)
async_set_domains_to_be_loaded(hass, all_domains)
@@ -876,6 +881,10 @@ async def _async_set_up_integrations(
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)
# Initialize backup
if "backup" in all_domains:
backup.async_initialize_backup(hass)
stages: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group, timeout)
@@ -908,24 +917,19 @@ async def _async_set_up_integrations(
stage_all_domains = stage_domains | stage_dep_domains
_LOGGER.info(
"Setting up stage %s: %s; already set up: %s\n"
"Dependencies: %s; already set up: %s",
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
name,
stage_domains,
(stage_domains_unfiltered - stage_domains) or "{}",
stage_dep_domains or "{}",
(stage_dep_domains_unfiltered - stage_dep_domains) or "{}",
stage_domains_unfiltered - stage_domains,
stage_dep_domains,
stage_dep_domains_unfiltered - stage_dep_domains,
)
if timeout is None:
await _async_setup_multi_components(hass, stage_all_domains, config)
continue
try:
async with hass.timeout.async_timeout(
timeout,
cool_down=COOLDOWN_TIME,
cancel_message=f"Bootstrap stage {name} timeout",
):
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, stage_all_domains, config)
except TimeoutError:
_LOGGER.warning(
@@ -937,11 +941,7 @@ async def _async_set_up_integrations(
# Wrap up startup
_LOGGER.debug("Waiting for startup to wrap up")
try:
async with hass.timeout.async_timeout(
WRAP_UP_TIMEOUT,
cool_down=COOLDOWN_TIME,
cancel_message="Bootstrap startup wrap up timeout",
):
async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
await hass.async_block_till_done()
except TimeoutError:
_LOGGER.warning(


@@ -3,7 +3,7 @@
"name": "Amazon",
"integrations": [
"alexa",
"alexa_devices",
"amazon_devices",
"amazon_polly",
"aws",
"aws_s3",


@@ -1,5 +0,0 @@
{
"domain": "frient",
"name": "Frient",
"iot_standards": ["zigbee"]
}


@@ -1,11 +1,5 @@
{
"domain": "sony",
"name": "Sony",
"integrations": [
"braviatv",
"ps4",
"sony_projector",
"songpal",
"playstation_network"
]
"integrations": ["braviatv", "ps4", "sony_projector", "songpal"]
}


@@ -1,6 +1,5 @@
{
"domain": "switchbot",
"name": "SwitchBot",
"integrations": ["switchbot", "switchbot_cloud"],
"iot_standards": ["matter"]
"integrations": ["switchbot", "switchbot_cloud"]
}


@@ -1,5 +1,5 @@
{
"domain": "third_reality",
"name": "Third Reality",
"iot_standards": ["matter", "zigbee"]
"iot_standards": ["zigbee"]
}


@@ -1,5 +0,0 @@
{
"domain": "tilt",
"name": "Tilt",
"integrations": ["tilt_ble", "tilt_pi"]
}


@@ -1,5 +1,5 @@
{
"domain": "ubiquiti",
"name": "Ubiquiti",
"integrations": ["airos", "unifi", "unifi_direct", "unifiled", "unifiprotect"]
"integrations": ["unifi", "unifi_direct", "unifiled", "unifiprotect"]
}


@@ -14,24 +14,30 @@ from jaraco.abode.exceptions import (
)
from jaraco.abode.helpers.timeline import Groups as GROUPS
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_DATE,
ATTR_DEVICE_ID,
ATTR_ENTITY_ID,
ATTR_TIME,
CONF_PASSWORD,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.typing import ConfigType
from .const import CONF_POLLING, DOMAIN, LOGGER
from .services import async_setup_services
SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
ATTR_DEVICE_NAME = "device_name"
ATTR_DEVICE_TYPE = "device_type"
@ -39,12 +45,22 @@ ATTR_EVENT_CODE = "event_code"
ATTR_EVENT_NAME = "event_name"
ATTR_EVENT_TYPE = "event_type"
ATTR_EVENT_UTC = "event_utc"
ATTR_SETTING = "setting"
ATTR_USER_NAME = "user_name"
ATTR_APP_TYPE = "app_type"
ATTR_EVENT_BY = "event_by"
ATTR_VALUE = "value"
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
CHANGE_SETTING_SCHEMA = vol.Schema(
{vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)
CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
@ -69,7 +85,7 @@ class AbodeSystem:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Abode component."""
async_setup_services(hass)
setup_hass_services(hass)
return True
@ -122,6 +138,60 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return unload_ok
def setup_hass_services(hass: HomeAssistant) -> None:
"""Home Assistant services."""
def change_setting(call: ServiceCall) -> None:
"""Change an Abode system setting."""
setting = call.data[ATTR_SETTING]
value = call.data[ATTR_VALUE]
try:
hass.data[DOMAIN].abode.set_setting(setting, value)
except AbodeException as ex:
LOGGER.warning(ex)
def capture_image(call: ServiceCall) -> None:
"""Capture a new image."""
entity_ids = call.data[ATTR_ENTITY_ID]
target_entities = [
entity_id
for entity_id in hass.data[DOMAIN].entity_ids
if entity_id in entity_ids
]
for entity_id in target_entities:
signal = f"abode_camera_capture_{entity_id}"
dispatcher_send(hass, signal)
def trigger_automation(call: ServiceCall) -> None:
"""Trigger an Abode automation."""
entity_ids = call.data[ATTR_ENTITY_ID]
target_entities = [
entity_id
for entity_id in hass.data[DOMAIN].entity_ids
if entity_id in entity_ids
]
for entity_id in target_entities:
signal = f"abode_trigger_automation_{entity_id}"
dispatcher_send(hass, signal)
hass.services.async_register(
DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
)
async def setup_hass_events(hass: HomeAssistant) -> None:
"""Home Assistant start and stop callbacks."""

View File

@ -1,90 +0,0 @@
"""Support for the Abode Security System."""
from __future__ import annotations
from jaraco.abode.exceptions import Exception as AbodeException
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from .const import DOMAIN, LOGGER
SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
ATTR_SETTING = "setting"
ATTR_VALUE = "value"
CHANGE_SETTING_SCHEMA = vol.Schema(
{vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)
CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
def _change_setting(call: ServiceCall) -> None:
"""Change an Abode system setting."""
setting = call.data[ATTR_SETTING]
value = call.data[ATTR_VALUE]
try:
call.hass.data[DOMAIN].abode.set_setting(setting, value)
except AbodeException as ex:
LOGGER.warning(ex)
def _capture_image(call: ServiceCall) -> None:
"""Capture a new image."""
entity_ids = call.data[ATTR_ENTITY_ID]
target_entities = [
entity_id
for entity_id in call.hass.data[DOMAIN].entity_ids
if entity_id in entity_ids
]
for entity_id in target_entities:
signal = f"abode_camera_capture_{entity_id}"
dispatcher_send(call.hass, signal)
def _trigger_automation(call: ServiceCall) -> None:
"""Trigger an Abode automation."""
entity_ids = call.data[ATTR_ENTITY_ID]
target_entities = [
entity_id
for entity_id in call.hass.data[DOMAIN].entity_ids
if entity_id in entity_ids
]
for entity_id in target_entities:
signal = f"abode_trigger_automation_{entity_id}"
dispatcher_send(call.hass, signal)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Home Assistant services."""
hass.services.async_register(
DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
)
hass.services.async_register(
DOMAIN,
SERVICE_TRIGGER_AUTOMATION,
_trigger_automation,
schema=AUTOMATION_SCHEMA,
)
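
The Abode hunks above move the service handlers out of `__init__.py` into a dedicated `services.py` exposing a single `async_setup_services` callback, with handlers reaching runtime objects through `call.hass` instead of a closed-over `hass`. A minimal sketch of that registration pattern, using a hypothetical domain and service name that are not part of this changeset:

```python
"""Illustrative services module for a hypothetical integration."""

import voluptuous as vol

from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv

DOMAIN = "example"  # hypothetical domain, for illustration only
SERVICE_SET_VALUE = "set_value"

SET_VALUE_SCHEMA = vol.Schema({vol.Required("value"): cv.string})


def _set_value(call: ServiceCall) -> None:
    """Handle the example.set_value service call."""
    # Runtime objects are reachable through call.hass, as in the handlers above.
    call.hass.data.setdefault(DOMAIN, {})["value"] = call.data["value"]


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register services once, typically from async_setup."""
    hass.services.async_register(
        DOMAIN, SERVICE_SET_VALUE, _set_value, schema=SET_VALUE_SCHEMA
    )
```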

View File

@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant
from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
PLATFORMS = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:

View File

@ -41,30 +41,7 @@ class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
"""Fetch data from the Adax."""
try:
if hasattr(self.adax_data_handler, "fetch_rooms_info"):
rooms = await self.adax_data_handler.fetch_rooms_info() or []
_LOGGER.debug("fetch_rooms_info returned: %s", rooms)
else:
_LOGGER.debug("fetch_rooms_info method not available, using get_rooms")
rooms = []
if not rooms:
_LOGGER.debug(
"No rooms from fetch_rooms_info, trying get_rooms as fallback"
)
rooms = await self.adax_data_handler.get_rooms() or []
_LOGGER.debug("get_rooms fallback returned: %s", rooms)
if not rooms:
raise UpdateFailed("No rooms available from Adax API")
except OSError as e:
raise UpdateFailed(f"Error communicating with API: {e}") from e
for room in rooms:
room["energyWh"] = int(room.get("energyWh", 0))
rooms = await self.adax_data_handler.get_rooms() or []
return {r["id"]: r for r in rooms}
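
The Adax hunk above reduces `_async_update_data` to a single `get_rooms()` call and returns the rooms keyed by id. For reference, a coordinator's update method returns the new data and raises `UpdateFailed` on communication errors; a minimal sketch with a hypothetical client object (not the Adax library):

```python
"""Minimal DataUpdateCoordinator sketch (hypothetical client, illustration only)."""

from datetime import timedelta
import logging
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
    """Fetch rooms from a client and expose them keyed by id."""

    def __init__(
        self, hass: HomeAssistant, config_entry: ConfigEntry, client: Any
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name="example",
            update_interval=timedelta(minutes=1),
        )
        self.client = client

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Return fresh data, raising UpdateFailed on communication errors."""
        try:
            rooms = await self.client.get_rooms() or []
        except OSError as err:
            raise UpdateFailed(f"Error communicating with API: {err}") from err
        return {room["id"]: room for room in rooms}
```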

View File

@ -1,77 +0,0 @@
"""Support for Adax energy sensors."""
from __future__ import annotations
from typing import cast
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator
async def async_setup_entry(
hass: HomeAssistant,
entry: AdaxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax energy sensors with config flow."""
if entry.data.get(CONNECTION_TYPE) != LOCAL:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
# Create individual energy sensors for each device
async_add_entities(
AdaxEnergySensor(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
)
class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
"""Representation of an Adax energy sensor."""
_attr_has_entity_name = True
_attr_translation_key = "energy"
_attr_device_class = SensorDeviceClass.ENERGY
_attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
_attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
_attr_state_class = SensorStateClass.TOTAL_INCREASING
_attr_suggested_display_precision = 3
def __init__(
self,
coordinator: AdaxCloudCoordinator,
device_id: str,
) -> None:
"""Initialize the energy sensor."""
super().__init__(coordinator)
self._device_id = device_id
room = coordinator.data[device_id]
self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_id)},
name=room["name"],
manufacturer="Adax",
)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available and "energyWh" in self.coordinator.data[self._device_id]
)
@property
def native_value(self) -> int:
"""Return the native value of the sensor."""
return int(self.coordinator.data[self._device_id]["energyWh"])

View File

@ -185,7 +185,6 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
keys=[AOD_TOWN, AOD_FORECAST_DAILY, AOD_FORECAST_CURRENT, AOD_WIND_DIRECTION],
name="Daily forecast wind bearing",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
AemetSensorEntityDescription(
entity_registry_enabled_default=False,
@ -193,7 +192,6 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_FORECAST_CURRENT, AOD_WIND_DIRECTION],
name="Hourly forecast wind bearing",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
AemetSensorEntityDescription(
entity_registry_enabled_default=False,
@ -336,8 +334,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
keys=[AOD_WEATHER, AOD_WIND_DIRECTION],
name="Wind bearing",
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT_ANGLE,
device_class=SensorDeviceClass.WIND_DIRECTION,
state_class=SensorStateClass.MEASUREMENT,
),
AemetSensorEntityDescription(
key=ATTR_API_WIND_MAX_SPEED,

View File

@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import (
)
from . import AgentDVRConfigEntry
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN as AGENT_DOMAIN
SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)
@ -82,7 +82,7 @@ class AgentCamera(MjpegCamera):
still_image_url=f"{device.client._server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}", # noqa: SLF001
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self.unique_id)},
identifiers={(AGENT_DOMAIN, self.unique_id)},
manufacturer="Agent",
model="Camera",
name=f"{device.client.name} {device.name}",

View File

@ -1,166 +0,0 @@
"""Integration to offer AI tasks to Home Assistant."""
import logging
from typing import Any
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
from homeassistant.core import (
HassJobType,
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.helpers import config_validation as cv, selector, storage
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
from .const import (
ATTR_ATTACHMENTS,
ATTR_INSTRUCTIONS,
ATTR_REQUIRED,
ATTR_STRUCTURE,
ATTR_TASK_NAME,
DATA_COMPONENT,
DATA_PREFERENCES,
DOMAIN,
SERVICE_GENERATE_DATA,
AITaskEntityFeature,
)
from .entity import AITaskEntity
from .http import async_setup as async_setup_http
from .task import GenDataTask, GenDataTaskResult, async_generate_data
__all__ = [
"DOMAIN",
"AITaskEntity",
"AITaskEntityFeature",
"GenDataTask",
"GenDataTaskResult",
"async_generate_data",
"async_setup",
"async_setup_entry",
"async_unload_entry",
]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
STRUCTURE_FIELD_SCHEMA = vol.Schema(
{
vol.Optional(CONF_DESCRIPTION): str,
vol.Optional(ATTR_REQUIRED): bool,
vol.Required(CONF_SELECTOR): selector.validate_selector,
}
)
def _validate_structure_fields(value: dict[str, Any]) -> vol.Schema:
"""Validate the structure fields as a voluptuous Schema."""
if not isinstance(value, dict):
raise vol.Invalid("Structure must be a dictionary")
fields = {}
for k, v in value.items():
field_class = vol.Required if v.get(ATTR_REQUIRED, False) else vol.Optional
fields[field_class(k, description=v.get(CONF_DESCRIPTION))] = selector.selector(
v[CONF_SELECTOR]
)
return vol.Schema(fields, extra=vol.PREVENT_EXTRA)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Register the process service."""
entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
await hass.data[DATA_PREFERENCES].async_load()
async_setup_http(hass)
hass.services.async_register(
DOMAIN,
SERVICE_GENERATE_DATA,
async_service_generate_data,
schema=vol.Schema(
{
vol.Required(ATTR_TASK_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_INSTRUCTIONS): cv.string,
vol.Optional(ATTR_STRUCTURE): vol.All(
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields,
),
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),
supports_response=SupportsResponse.ONLY,
job_type=HassJobType.Coroutinefunction,
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
async def async_service_generate_data(call: ServiceCall) -> ServiceResponse:
"""Run the run task service."""
result = await async_generate_data(hass=call.hass, **call.data)
return result.as_dict()
class AITaskPreferences:
"""AI Task preferences."""
KEYS = ("gen_data_entity_id",)
gen_data_entity_id: str | None = None
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the preferences."""
self._store: storage.Store[dict[str, str | None]] = storage.Store(
hass, 1, DOMAIN
)
async def async_load(self) -> None:
"""Load the data from the store."""
data = await self._store.async_load()
if data is None:
return
for key in self.KEYS:
setattr(self, key, data[key])
@callback
def async_set_preferences(
self,
*,
gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
) -> None:
"""Set the preferences."""
changed = False
for key, value in (("gen_data_entity_id", gen_data_entity_id),):
if value is not UNDEFINED:
if getattr(self, key) != value:
setattr(self, key, value)
changed = True
if not changed:
return
self._store.async_delay_save(self.as_dict, 10)
@callback
def as_dict(self) -> dict[str, str | None]:
"""Get the current preferences."""
return {key: getattr(self, key) for key in self.KEYS}
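
`AITaskPreferences` above persists a single preference through `homeassistant.helpers.storage.Store`, loading it on startup and saving lazily with `async_delay_save`. A stripped-down sketch of that load/save pattern, with a hypothetical storage key and attribute name:

```python
"""Sketch of lazy preference persistence with helpers.storage.Store."""

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import storage

STORAGE_KEY = "example_preferences"  # hypothetical storage key
STORAGE_VERSION = 1


class Preferences:
    """Hold a single persisted preference."""

    preferred_entity_id: str | None = None

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the store."""
        self._store: storage.Store[dict[str, str | None]] = storage.Store(
            hass, STORAGE_VERSION, STORAGE_KEY
        )

    async def async_load(self) -> None:
        """Load persisted data, if any."""
        if (data := await self._store.async_load()) is not None:
            self.preferred_entity_id = data.get("preferred_entity_id")

    @callback
    def async_set(self, entity_id: str | None) -> None:
        """Update the preference and schedule a delayed save."""
        if self.preferred_entity_id == entity_id:
            return
        self.preferred_entity_id = entity_id
        self._store.async_delay_save(self.as_dict, 10)

    @callback
    def as_dict(self) -> dict[str, str | None]:
        """Return the data to persist."""
        return {"preferred_entity_id": self.preferred_entity_id}
```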

View File

@ -1,40 +0,0 @@
"""Constants for the AI Task integration."""
from __future__ import annotations
from enum import IntFlag
from typing import TYPE_CHECKING, Final
from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from homeassistant.helpers.entity_component import EntityComponent
from . import AITaskPreferences
from .entity import AITaskEntity
DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
SERVICE_GENERATE_DATA = "generate_data"
ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"
ATTR_STRUCTURE: Final = "structure"
ATTR_REQUIRED: Final = "required"
ATTR_ATTACHMENTS: Final = "attachments"
DEFAULT_SYSTEM_PROMPT = (
"You are a Home Assistant expert and help users with their tasks."
)
class AITaskEntityFeature(IntFlag):
"""Supported features of the AI task entity."""
GENERATE_DATA = 1
"""Generate data based on instructions."""
SUPPORT_ATTACHMENTS = 2
"""Support attachments with generate data."""

View File

@ -1,106 +0,0 @@
"""Entity for the AI Task integration."""
from collections.abc import AsyncGenerator
import contextlib
from typing import final
from propcache.api import cached_property
from homeassistant.components.conversation import (
ChatLog,
UserContent,
async_get_chat_log,
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util
from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature
from .task import GenDataTask, GenDataTaskResult
class AITaskEntity(RestoreEntity):
"""Entity that supports conversations."""
_attr_should_poll = False
_attr_supported_features = AITaskEntityFeature(0)
__last_activity: str | None = None
@property
@final
def state(self) -> str | None:
"""Return the state of the entity."""
if self.__last_activity is None:
return None
return self.__last_activity
@cached_property
def supported_features(self) -> AITaskEntityFeature:
"""Flag supported features."""
return self._attr_supported_features
async def async_internal_added_to_hass(self) -> None:
"""Call when the entity is added to hass."""
await super().async_internal_added_to_hass()
state = await self.async_get_last_state()
if (
state is not None
and state.state is not None
and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
):
self.__last_activity = state.state
@final
@contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log(
self,
session: ChatSession,
task: GenDataTask,
) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup
with (
async_get_chat_log(
self.hass,
session,
None,
) as chat_log,
):
await chat_log.async_provide_llm_data(
llm.LLMContext(
platform=self.platform.domain,
context=None,
language=None,
assistant=DOMAIN,
device_id=None,
),
user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
)
chat_log.async_add_user_content(
UserContent(task.instructions, attachments=task.attachments)
)
yield chat_log
@final
async def internal_async_generate_data(
self,
session: ChatSession,
task: GenDataTask,
) -> GenDataTaskResult:
"""Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
return await self._async_generate_data(task, chat_log)
async def _async_generate_data(
self,
task: GenDataTask,
chat_log: ChatLog,
) -> GenDataTaskResult:
"""Handle a gen data task."""
raise NotImplementedError
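
`AITaskEntity` above leaves `_async_generate_data` as the hook a platform implements, while `internal_async_generate_data` wraps it in a chat log. A hedged sketch of what a platform subclass could look like, assuming `chat_log.conversation_id` is available as in the conversation component; the echo behavior is invented purely for illustration:

```python
"""Illustrative AITaskEntity subclass (not part of this changeset)."""

from homeassistant.components.ai_task import (
    AITaskEntity,
    AITaskEntityFeature,
    GenDataTask,
    GenDataTaskResult,
)
from homeassistant.components.conversation import ChatLog


class EchoAITaskEntity(AITaskEntity):
    """Toy entity that echoes the instructions back as generated data."""

    _attr_name = "Echo task"
    _attr_supported_features = AITaskEntityFeature.GENERATE_DATA

    async def _async_generate_data(
        self, task: GenDataTask, chat_log: ChatLog
    ) -> GenDataTaskResult:
        """Handle a data generation task without calling a real model."""
        return GenDataTaskResult(
            conversation_id=chat_log.conversation_id,
            data=f"echo: {task.instructions}",
        )
```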

View File

@ -1,54 +0,0 @@
"""HTTP endpoint for AI Task integration."""
from typing import Any
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from .const import DATA_PREFERENCES
@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up the HTTP API for the conversation integration."""
websocket_api.async_register_command(hass, websocket_get_preferences)
websocket_api.async_register_command(hass, websocket_set_preferences)
@websocket_api.websocket_command(
{
vol.Required("type"): "ai_task/preferences/get",
}
)
@callback
def websocket_get_preferences(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Get AI task preferences."""
preferences = hass.data[DATA_PREFERENCES]
connection.send_result(msg["id"], preferences.as_dict())
@websocket_api.websocket_command(
{
vol.Required("type"): "ai_task/preferences/set",
vol.Optional("gen_data_entity_id"): vol.Any(str, None),
}
)
@websocket_api.require_admin
@callback
def websocket_set_preferences(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Set AI task preferences."""
preferences = hass.data[DATA_PREFERENCES]
msg.pop("type")
msg_id = msg.pop("id")
preferences.async_set_preferences(**msg)
connection.send_result(msg_id, preferences.as_dict())
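
The removed `http.py` registers two small WebSocket commands for reading and writing preferences. The registration pattern, reduced to a single read-only command with a hypothetical command type, looks like this:

```python
"""Sketch of a read-only WebSocket command (hypothetical command type)."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Register the command during integration setup."""
    websocket_api.async_register_command(hass, websocket_get_status)


@websocket_api.websocket_command({vol.Required("type"): "example/status"})
@callback
def websocket_get_status(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Return a static status payload to the caller."""
    connection.send_result(msg["id"], {"status": "ok"})
```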

View File

@ -1,7 +0,0 @@
{
"services": {
"generate_data": {
"service": "mdi:file-star-four-points-outline"
}
}
}

View File

@ -1,10 +0,0 @@
{
"domain": "ai_task",
"name": "AI Task",
"after_dependencies": ["camera"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"],
"documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "system",
"quality_scale": "internal"
}

View File

@ -1,33 +0,0 @@
generate_data:
fields:
task_name:
example: "home summary"
required: true
selector:
text:
instructions:
example: "Generate a funny notification that the garage door was left open"
required: true
selector:
text:
multiline: true
entity_id:
required: false
selector:
entity:
filter:
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_DATA
structure:
advanced: true
required: false
example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
selector:
object:
attachments:
required: false
selector:
media:
accept:
- "*"

View File

@ -1,30 +0,0 @@
{
"services": {
"generate_data": {
"name": "Generate data",
"description": "Uses AI to run a task that generates data.",
"fields": {
"task_name": {
"name": "Task name",
"description": "Name of the task."
},
"instructions": {
"name": "Instructions",
"description": "Instructions on what needs to be done."
},
"entity_id": {
"name": "Entity ID",
"description": "Entity ID to run the task on. If not provided, the preferred entity will be used."
},
"structure": {
"name": "Structured output",
"description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
},
"attachments": {
"name": "Attachments",
"description": "List of files to attach for multi-modal AI analysis."
}
}
}
}
}

View File

@ -1,169 +0,0 @@
"""AI tasks to be handled by agents."""
from __future__ import annotations
from dataclasses import dataclass
import mimetypes
from pathlib import Path
import tempfile
from typing import Any
import voluptuous as vol
from homeassistant.components import camera, conversation, media_source
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import async_get_chat_session
from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature
def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)
async def async_generate_data(
hass: HomeAssistant,
*,
task_name: str,
entity_id: str | None = None,
instructions: str,
structure: vol.Schema | None = None,
attachments: list[dict] | None = None,
) -> GenDataTaskResult:
"""Run a task in the AI Task integration."""
if entity_id is None:
entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id
if entity_id is None:
raise HomeAssistantError("No entity_id provided and no preferred entity set")
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
if entity is None:
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
if AITaskEntityFeature.GENERATE_DATA not in entity.supported_features:
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support generating data"
)
# Resolve attachments
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []
if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)
for attachment in attachments or []:
media_content_id = attachment["media_content_id"]
# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")
# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)
temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)
with async_get_chat_session(hass) as session:
if created_files:
def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)
@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)
session.async_on_cleanup(cleanup_files_callback)
return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
)
@dataclass(slots=True)
class GenDataTask:
"""Gen data task to be processed."""
name: str
"""Name of the task."""
instructions: str
"""Instructions on what needs to be done."""
structure: vol.Schema | None = None
"""Optional structure for the data to be generated."""
attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""
def __str__(self) -> str:
"""Return task as a string."""
return f"<GenDataTask {self.name}: {id(self)}>"
@dataclass(slots=True)
class GenDataTaskResult:
"""Result of gen data task."""
conversation_id: str
"""Unique identifier for the conversation."""
data: Any
"""Data generated by the task."""
def as_dict(self) -> dict[str, Any]:
"""Return result as a dict."""
return {
"conversation_id": self.conversation_id,
"data": self.data,
}
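
`async_generate_data` above resolves attachments, opens a chat session, and delegates to the selected entity, returning a `GenDataTaskResult`. A short sketch of how other code could invoke it, assuming an AI Task entity is configured; the task name, entity id, and instructions are made up:

```python
"""Sketch: invoking the AI Task helper from other code (illustrative values)."""

from homeassistant.components.ai_task import async_generate_data
from homeassistant.core import HomeAssistant


async def async_summarize(hass: HomeAssistant) -> str:
    """Generate a short text with a configured AI Task entity."""
    result = await async_generate_data(
        hass,
        task_name="home summary",  # hypothetical task name
        entity_id="ai_task.my_agent",  # hypothetical entity id
        instructions="Summarize the state of the house in one sentence.",
    )
    # GenDataTaskResult.data holds whatever the entity generated.
    return str(result.data)
```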

View File

@ -51,16 +51,9 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
self._current_version = (
await self.client.get_current_measures()
).firmware_version
except AirGradientError as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={"error": str(error)},
) from error
self._current_version = (
await self.client.get_current_measures()
).firmware_version
async def _async_update_data(self) -> AirGradientData:
try:

View File

@ -6,7 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."]
}

View File

@ -14,9 +14,9 @@ rules:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: |
@ -34,7 +34,7 @@ rules:
docs-configuration-parameters:
status: exempt
comment: No options to configure
docs-installation-parameters: done
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
@ -43,19 +43,23 @@ rules:
status: exempt
comment: |
This integration does not require authentication.
test-coverage: done
test-coverage: todo
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
discovery-update-info:
status: todo
comment: DHCP is still possible
discovery:
status: todo
comment: DHCP is still possible
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |

View File

@ -39,14 +39,14 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
)
self._abort_if_unique_id_configured()
try:
location_point_valid = await check_location(
location_point_valid = await test_location(
websession,
user_input["api_key"],
user_input["latitude"],
user_input["longitude"],
)
if not location_point_valid:
location_nearest_valid = await check_location(
location_nearest_valid = await test_location(
websession,
user_input["api_key"],
user_input["latitude"],
@ -88,7 +88,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
)
async def check_location(
async def test_location(
client: ClientSession,
api_key: str,
latitude: float,

View File

@ -45,6 +45,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bo
# Store Entity and Initialize Platforms
entry.runtime_data = coordinator
# Listen for option changes
entry.async_on_unload(entry.add_update_listener(update_listener))
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Clean up unused device entries with no entities
@ -85,3 +88,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
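
The AirNow hunks above trade `OptionsFlowWithReload` for a manually registered update listener that reloads the entry whenever its options change. A minimal sketch of that listener wiring, with a generic, hypothetical platform list:

```python
"""Sketch of the options update-listener pattern used above."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

PLATFORMS = [Platform.SENSOR]  # hypothetical platform list


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the entry and reload it whenever its options change."""
    entry.async_on_unload(entry.add_update_listener(_async_update_listener))
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload the config entry so new options take effect."""
    await hass.config_entries.async_reload(entry.entry_id)
```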

View File

@ -13,7 +13,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback
@ -126,7 +126,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
return AirNowOptionsFlowHandler()
class AirNowOptionsFlowHandler(OptionsFlowWithReload):
class AirNowOptionsFlowHandler(OptionsFlow):
"""Handle an options flow for AirNow."""
async def async_step_init(

View File

@ -71,7 +71,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
data: dict[str, Any] = {}
data = {}
try:
obs = await self.airnow.observations.latLong(
self.latitude,

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airnow",
"iot_class": "cloud_polling",
"loggers": ["pyairnow"],
"requirements": ["pyairnow==1.3.1"]
"requirements": ["pyairnow==1.2.1"]
}

View File

@ -1,42 +0,0 @@
"""The Ubiquiti airOS integration."""
from __future__ import annotations
from airos.airos8 import AirOS
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Set up Ubiquiti airOS from a config entry."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(hass, verify_ssl=False)
airos_device = AirOS(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@ -1,82 +0,0 @@
"""Config flow for the Ubiquiti airOS integration."""
from __future__ import annotations
import logging
from typing import Any
from airos.exceptions import (
ConnectionAuthenticationError,
ConnectionSetupError,
DataMissingError,
DeviceConnectionError,
KeyDataMissingError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
from .coordinator import AirOS
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
}
)
class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""
VERSION = 1
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(self.hass, verify_ssl=False)
airos_device = AirOS(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
ConnectionSetupError,
DeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (ConnectionAuthenticationError, DataMissingError):
errors["base"] = "invalid_auth"
except KeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

View File

@ -1,9 +0,0 @@
"""Constants for the Ubiquiti airOS integration."""
from datetime import timedelta
DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)
MANUFACTURER = "Ubiquiti"

View File

@ -1,66 +0,0 @@
"""DataUpdateCoordinator for AirOS."""
from __future__ import annotations
import logging
from airos.airos8 import AirOS, AirOSData
from airos.exceptions import (
ConnectionAuthenticationError,
ConnectionSetupError,
DataMissingError,
DeviceConnectionError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
"""Class to manage fetching AirOS data from single endpoint."""
config_entry: AirOSConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS
) -> None:
"""Initialize the coordinator."""
self.airos_device = airos_device
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> AirOSData:
"""Fetch data from AirOS."""
try:
await self.airos_device.login()
return await self.airos_device.status()
except (ConnectionAuthenticationError,) as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (ConnectionSetupError, DeviceConnectionError, TimeoutError) as err:
_LOGGER.error("Error connecting to airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
except (DataMissingError,) as err:
_LOGGER.error("Expected data not returned by airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="error_data_missing",
) from err

View File

@ -1,33 +0,0 @@
"""Diagnostics support for airOS."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .coordinator import AirOSConfigEntry
IP_REDACT = ["addr", "ipaddr", "ip6addr", "lastip"] # IP related
HW_REDACT = ["apmac", "hwaddr", "mac"] # MAC address
TO_REDACT_HA = [CONF_HOST, CONF_PASSWORD]
TO_REDACT_AIROS = [
"hostname", # Prevent leaking device naming
"essid", # Network SSID
"lat", # GPS latitude to prevent exposing location data.
"lon", # GPS longitude to prevent exposing location data.
*HW_REDACT,
*IP_REDACT,
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AirOSConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
return {
"entry_data": async_redact_data(entry.data, TO_REDACT_HA),
"data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
}
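
The removed airOS `diagnostics.py` builds its payload with `async_redact_data` so hostnames, credentials, MAC addresses, and coordinates never end up in a downloaded diagnostics file. The same helper works for any mapping; a tiny sketch with an illustrative key list:

```python
"""Sketch: redacting sensitive keys before returning diagnostics."""

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD

TO_REDACT = [CONF_HOST, CONF_PASSWORD, "mac"]  # illustrative key list


def build_diagnostics(
    entry_data: dict[str, Any], device_data: dict[str, Any]
) -> dict[str, Any]:
    """Return a diagnostics payload with secrets replaced by **REDACTED**."""
    return {
        "entry_data": async_redact_data(entry_data, TO_REDACT),
        "data": async_redact_data(device_data, TO_REDACT),
    }
```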

View File

@ -1,36 +0,0 @@
"""Generic AirOS Entity Class."""
from __future__ import annotations
from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import AirOSDataUpdateCoordinator
class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
"""Represent a AirOS Entity."""
_attr_has_entity_name = True
def __init__(self, coordinator: AirOSDataUpdateCoordinator) -> None:
"""Initialise the gateway."""
super().__init__(coordinator)
airos_data = self.coordinator.data
configuration_url: str | None = (
f"https://{coordinator.config_entry.data[CONF_HOST]}"
)
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)},
configuration_url=configuration_url,
identifiers={(DOMAIN, str(airos_data.host.device_id))},
manufacturer=MANUFACTURER,
model=airos_data.host.devmodel,
name=airos_data.host.hostname,
sw_version=airos_data.host.fwversion,
)

View File

@ -1,10 +0,0 @@
{
"domain": "airos",
"name": "Ubiquiti airOS",
"codeowners": ["@CoMPaTech"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.2.1"]
}

View File

@ -1,72 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: airOS does not have actions
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: airOS does not have actions
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: local_polling without events
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: airOS does not have actions
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: todo
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: todo
comment: prepared binary_sensors will provide this
entity-translations: done
exception-translations: done
icon-translations:
status: exempt
comment: no (custom) icons used or envisioned
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@ -1,152 +0,0 @@
"""AirOS Sensor component for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from airos.data import NetRole, WirelessMode
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS,
UnitOfDataRate,
UnitOfFrequency,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
NETROLE_OPTIONS = [mode.value for mode in NetRole]
@dataclass(frozen=True, kw_only=True)
class AirOSSensorEntityDescription(SensorEntityDescription):
"""Describe an AirOS sensor."""
value_fn: Callable[[AirOSData], StateType]
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
AirOSSensorEntityDescription(
key="host_cpuload",
translation_key="host_cpuload",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.host.cpuload,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="host_netrole",
translation_key="host_netrole",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.host.netrole.value,
options=NETROLE_OPTIONS,
),
AirOSSensorEntityDescription(
key="wireless_frequency",
translation_key="wireless_frequency",
native_unit_of_measurement=UnitOfFrequency.MEGAHERTZ,
device_class=SensorDeviceClass.FREQUENCY,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.frequency,
),
AirOSSensorEntityDescription(
key="wireless_essid",
translation_key="wireless_essid",
value_fn=lambda data: data.wireless.essid,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.wireless.mode.value.replace("-", "_").lower(),
options=WIRELESS_MODE_OPTIONS,
),
AirOSSensorEntityDescription(
key="wireless_antenna_gain",
translation_key="wireless_antenna_gain",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.antenna_gain,
),
AirOSSensorEntityDescription(
key="wireless_throughput_tx",
translation_key="wireless_throughput_tx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.throughput.tx,
),
AirOSSensorEntityDescription(
key="wireless_throughput_rx",
translation_key="wireless_throughput_rx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.throughput.rx,
),
AirOSSensorEntityDescription(
key="wireless_polling_dl_capacity",
translation_key="wireless_polling_dl_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.polling.dl_capacity,
),
AirOSSensorEntityDescription(
key="wireless_polling_ul_capacity",
translation_key="wireless_polling_ul_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AirOSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the AirOS sensors from a config entry."""
coordinator = config_entry.runtime_data
async_add_entities(AirOSSensor(coordinator, description) for description in SENSORS)
class AirOSSensor(AirOSEntity, SensorEntity):
"""Representation of a Sensor."""
entity_description: AirOSSensorEntityDescription
def __init__(
self,
coordinator: AirOSDataUpdateCoordinator,
description: AirOSSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@ -1,87 +0,0 @@
{
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"key_data_missing": "Expected data not returned from the device, check the documentation for supported devices",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"entity": {
"sensor": {
"host_cpuload": {
"name": "CPU load"
},
"host_netrole": {
"name": "Network role",
"state": {
"bridge": "Bridge",
"router": "Router"
}
},
"wireless_frequency": {
"name": "Wireless frequency"
},
"wireless_essid": {
"name": "Wireless SSID"
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"ap_ptp": "Access point",
"sta_ptp": "Station"
}
},
"wireless_antenna_gain": {
"name": "Antenna gain"
},
"wireless_throughput_tx": {
"name": "Throughput transmit (actual)"
},
"wireless_throughput_rx": {
"name": "Throughput receive (actual)"
},
"wireless_polling_dl_capacity": {
"name": "Download capacity"
},
"wireless_polling_ul_capacity": {
"name": "Upload capacity"
},
"wireless_remote_hostname": {
"name": "Remote hostname"
}
}
},
"exceptions": {
"invalid_auth": {
"message": "[%key:common::config_flow::error::invalid_auth%]"
},
"cannot_connect": {
"message": "[%key:common::config_flow::error::cannot_connect%]"
},
"key_data_missing": {
"message": "Key data not returned from device"
},
"error_data_missing": {
"message": "Data incomplete or missing"
}
}
}

View File

@ -6,5 +6,6 @@ CONF_RETURN_AVERAGE: Final = "return_average"
CONF_CLIP_NEGATIVE: Final = "clip_negatives"
DOMAIN: Final = "airq"
MANUFACTURER: Final = "CorantGmbH"
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
UPDATE_INTERVAL: float = 10.0

View File

@ -10,7 +10,7 @@ from aioairq.core import AirQ, identify_warming_up_sensors
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@ -39,7 +39,7 @@ class AirQCoordinator(DataUpdateCoordinator):
name=DOMAIN,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
session = async_create_clientsession(hass)
session = async_get_clientsession(hass)
self.airq = AirQ(
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
)

View File

@ -4,6 +4,9 @@
"health_index": {
"default": "mdi:heart-pulse"
},
"absolute_humidity": {
"default": "mdi:water"
},
"oxygen": {
"default": "mdi:leaf"
},

View File

@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairq"],
"requirements": ["aioairq==0.4.6"]
"requirements": ["aioairq==0.4.4"]
}

View File

@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import (
CONCENTRATION_GRAMS_PER_CUBIC_METER,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
@ -29,7 +28,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER
from .const import (
ACTIVITY_BECQUEREL_PER_CUBIC_METER,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
)
_LOGGER = logging.getLogger(__name__)
@ -193,7 +195,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
),
AirQEntityDescription(
key="humidity_abs",
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
translation_key="absolute_humidity",
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("humidity_abs"),

View File

@ -93,6 +93,9 @@
"health_index": {
"name": "Health index"
},
"absolute_humidity": {
"name": "Absolute humidity"
},
"hydrogen": {
"name": "Hydrogen"
},

View File

@ -5,22 +5,23 @@ from __future__ import annotations
from datetime import timedelta
import logging
from airthings import Airthings
from airthings import Airthings, AirthingsDevice, AirthingsError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_SECRET
from .coordinator import AirthingsDataUpdateCoordinator
from .const import CONF_SECRET, DOMAIN
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=6)
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
type AirthingsDataCoordinatorType = DataUpdateCoordinator[dict[str, AirthingsDevice]]
type AirthingsConfigEntry = ConfigEntry[AirthingsDataCoordinatorType]
async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
@ -31,8 +32,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
async_get_clientsession(hass),
)
coordinator = AirthingsDataUpdateCoordinator(hass, airthings)
async def _update_method() -> dict[str, AirthingsDevice]:
"""Get the latest data from Airthings."""
try:
return await airthings.update_devices() # type: ignore[no-any-return]
except AirthingsError as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_method=_update_method,
update_interval=SCAN_INTERVAL,
)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
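
The Airthings hunks move between a dedicated coordinator subclass and an inline `DataUpdateCoordinator`, but both variants hang the coordinator off `entry.runtime_data` behind a typed `ConfigEntry` alias. A compact sketch of that pattern with placeholder names and a stand-in fetch function:

```python
"""Sketch: typed config entry with runtime_data (placeholder names)."""

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type ExampleConfigEntry = ConfigEntry[DataUpdateCoordinator[dict[str, str]]]


async def _async_fetch() -> dict[str, str]:
    """Stand-in fetch; a real integration would call its client library here."""
    return {"device_1": "online"}


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Create the coordinator and store it on the entry for the platforms."""
    coordinator: DataUpdateCoordinator[dict[str, str]] = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name="example",
        update_method=_async_fetch,
        update_interval=timedelta(minutes=6),
    )
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    return True
```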

View File

@ -45,8 +45,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
)
errors = {}
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()
try:
await airthings.get_token(
@ -62,6 +60,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()
return self.async_create_entry(title="Airthings", data=user_input)
return self.async_show_form(

View File

@ -1,36 +0,0 @@
"""The Airthings integration."""
from datetime import timedelta
import logging
from airthings import Airthings, AirthingsDevice, AirthingsError
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=6)
class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
"""Coordinator for Airthings data updates."""
def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_method=self._update_method,
update_interval=SCAN_INTERVAL,
)
self.airthings = airthings
async def _update_method(self) -> dict[str, AirthingsDevice]:
"""Get the latest data from Airthings."""
try:
return await self.airthings.update_devices() # type: ignore[no-any-return]
except AirthingsError as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err

View File

@ -19,7 +19,6 @@ from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS,
EntityCategory,
UnitOfPressure,
UnitOfSoundPressure,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
@ -28,44 +27,32 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirthingsConfigEntry
from . import AirthingsConfigEntry, AirthingsDataCoordinatorType
from .const import DOMAIN
from .coordinator import AirthingsDataUpdateCoordinator
SENSORS: dict[str, SensorEntityDescription] = {
"radonShortTermAvg": SensorEntityDescription(
key="radonShortTermAvg",
native_unit_of_measurement="Bq/m³",
translation_key="radon",
suggested_display_precision=0,
),
"temp": SensorEntityDescription(
key="temp",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
"humidity": SensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"pressure": SensorEntityDescription(
key="pressure",
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
native_unit_of_measurement=UnitOfPressure.MBAR,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
"sla": SensorEntityDescription(
key="sla",
device_class=SensorDeviceClass.SOUND_PRESSURE,
native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"battery": SensorEntityDescription(
key="battery",
@ -73,47 +60,40 @@ SENSORS: dict[str, SensorEntityDescription] = {
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"co2": SensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"voc": SensorEntityDescription(
key="voc",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"light": SensorEntityDescription(
key="light",
native_unit_of_measurement=PERCENTAGE,
translation_key="light",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"lux": SensorEntityDescription(
key="lux",
device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"virusRisk": SensorEntityDescription(
key="virusRisk",
translation_key="virus_risk",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"mold": SensorEntityDescription(
key="mold",
translation_key="mold",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"rssi": SensorEntityDescription(
key="rssi",
@ -122,21 +102,18 @@ SENSORS: dict[str, SensorEntityDescription] = {
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"pm1": SensorEntityDescription(
key="pm1",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM1,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
"pm25": SensorEntityDescription(
key="pm25",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
),
}
@ -150,7 +127,7 @@ async def async_setup_entry(
coordinator = entry.runtime_data
entities = [
AirthingsDeviceSensor(
AirthingsHeaterEnergySensor(
coordinator,
airthings_device,
SENSORS[sensor_types],
@ -162,8 +139,8 @@ async def async_setup_entry(
async_add_entities(entities)
class AirthingsDeviceSensor(
CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
class AirthingsHeaterEnergySensor(
CoordinatorEntity[AirthingsDataCoordinatorType], SensorEntity
):
"""Representation of a Airthings Sensor device."""
@ -172,7 +149,7 @@ class AirthingsDeviceSensor(
def __init__(
self,
coordinator: AirthingsDataUpdateCoordinator,
coordinator: AirthingsDataCoordinatorType,
airthings_device: AirthingsDevice,
entity_description: SensorEntityDescription,
) -> None:

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
"iot_class": "local_push",
"loggers": ["airtouch5py"],
"requirements": ["airtouch5py==0.3.0"]
"requirements": ["airtouch5py==0.2.11"]
}

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.7.1"]
"requirements": ["aioairzone-cloud==0.6.12"]
}

View File

@ -505,13 +505,8 @@ class ClimateCapabilities(AlexaEntity):
):
yield AlexaThermostatController(self.hass, self.entity)
yield AlexaTemperatureSensor(self.hass, self.entity)
if (
self.entity.domain == water_heater.DOMAIN
and (
supported_features
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
)
and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
if self.entity.domain == water_heater.DOMAIN and (
supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
):
yield AlexaModeController(
self.entity,
@ -639,9 +634,7 @@ class FanCapabilities(AlexaEntity):
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
)
force_range_controller = False
if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get(
fan.ATTR_PRESET_MODES
):
if supported & fan.FanEntityFeature.PRESET_MODE:
yield AlexaModeController(
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
)
@ -679,11 +672,7 @@ class RemoteCapabilities(AlexaEntity):
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if (
activities
and (supported & remote.RemoteEntityFeature.ACTIVITY)
and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
):
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)
@ -703,9 +692,7 @@ class HumidifierCapabilities(AlexaEntity):
"""Yield the supported interfaces."""
yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if (
supported & humidifier.HumidifierEntityFeature.MODES
) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
if supported & humidifier.HumidifierEntityFeature.MODES:
yield AlexaModeController(
self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
)

View File

@ -1,115 +0,0 @@
"""Support for binary sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Final
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Alexa Devices binary sensor entity description."""
is_on_fn: Callable[[AmazonDevice, str], bool]
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="online",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
is_on_fn=lambda device, _: device.online,
),
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, _: device.bluetooth_state,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="humanPresenceDetectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AmazonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Alexa Devices binary sensors based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in coordinator.data
if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
)
class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
"""Binary sensor device."""
entity_description: AmazonBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return True if the binary sensor is on."""
return self.entity_description.is_on_fn(
self.device, self.entity_description.key
)
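
A minimal sketch of how the is_supported callables above gate entity creation; the device object here is a hypothetical stand-in for an aioamazondevices device, not something taken from the diff:

from types import SimpleNamespace

# Hypothetical device: only a human-presence sensor is reported, so only the
# descriptions whose is_supported returns True would yield entities at setup.
device = SimpleNamespace(
    sensors={"humanPresenceDetectionState": SimpleNamespace(value="DETECTED")}
)
supported_keys = [desc.key for desc in BINARY_SENSORS if desc.is_supported(device, desc.key)]
# -> ["online", "bluetooth", "humanPresenceDetectionState"] under these assumptions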

View File

@ -1,131 +0,0 @@
"""Config flow for Alexa Devices integration."""
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
WrongCountry,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.selector import CountrySelector
from .const import CONF_LOGIN_DATA, DOMAIN
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CODE): cv.string,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
"""Validate the user input allows us to connect."""
session = aiohttp_client.async_create_clientsession(hass)
api = AmazonEchoApi(
session,
data[CONF_COUNTRY],
data[CONF_USERNAME],
data[CONF_PASSWORD],
)
return await api.login_mode_interactive(data[CONF_CODE])
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Alexa Devices."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors = {}
if user_input:
try:
data = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
except WrongCountry:
errors["base"] = "wrong_country"
else:
await self.async_set_unique_id(data["customer_info"]["user_id"])
self._abort_if_unique_id_configured()
user_input.pop(CONF_CODE)
return self.async_create_entry(
title=user_input[CONF_USERNAME],
data=user_input | {CONF_LOGIN_DATA: data},
)
return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(
CONF_COUNTRY, default=self.hass.config.country
): CountrySelector(),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CODE): cv.string,
}
),
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauth flow."""
self.context["title_placeholders"] = {CONF_USERNAME: entry_data[CONF_USERNAME]}
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth confirm."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
entry_data = reauth_entry.data
if user_input is not None:
try:
await validate_input(self.hass, {**reauth_entry.data, **user_input})
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data={
CONF_USERNAME: entry_data[CONF_USERNAME],
CONF_PASSWORD: entry_data[CONF_PASSWORD],
CONF_CODE: user_input[CONF_CODE],
},
)
return self.async_show_form(
step_id="reauth_confirm",
description_placeholders={CONF_USERNAME: entry_data[CONF_USERNAME]},
data_schema=STEP_REAUTH_DATA_SCHEMA,
errors=errors,
)

View File

@ -1,66 +0,0 @@
"""Diagnostics support for Alexa Devices integration."""
from __future__ import annotations
from typing import Any
from aioamazondevices.api import AmazonDevice
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntry
from .coordinator import AmazonConfigEntry
TO_REDACT = {CONF_PASSWORD, CONF_USERNAME, CONF_NAME, "title"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AmazonConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
devices: list[dict[str, dict[str, Any]]] = [
build_device_data(device) for device in coordinator.data.values()
]
return {
"entry": async_redact_data(entry.as_dict(), TO_REDACT),
"device_info": {
"last_update success": coordinator.last_update_success,
"last_exception": repr(coordinator.last_exception),
"devices": devices,
},
}
async def async_get_device_diagnostics(
hass: HomeAssistant, entry: AmazonConfigEntry, device_entry: DeviceEntry
) -> dict[str, Any]:
"""Return diagnostics for a device."""
coordinator = entry.runtime_data
assert device_entry.serial_number
return build_device_data(coordinator.data[device_entry.serial_number])
def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"""Build device data for diagnostics."""
return {
"account name": device.account_name,
"capabilities": device.capabilities,
"device family": device.device_family,
"device type": device.device_type,
"device cluster members": device.device_cluster_members,
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
}

View File

@ -1,50 +0,0 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"
},
"send_text_command": {
"service": "mdi:microphone-message"
}
}
}

View File

@ -1,12 +0,0 @@
{
"domain": "alexa_devices",
"name": "Alexa Devices",
"codeowners": ["@chemelli74"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/alexa_devices",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "silver",
"requirements": ["aioamazondevices==4.0.0"]
}

View File

@ -1,88 +0,0 @@
"""Support for sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
SENSORS: Final = (
AmazonSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement_fn=lambda device, _key: (
UnitOfTemperature.CELSIUS
if device.sensors[_key].scale == "CELSIUS"
else UnitOfTemperature.FAHRENHEIT
),
),
AmazonSensorEntityDescription(
key="illuminance",
device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement=LIGHT_LUX,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AmazonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Amazon Devices sensors based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
AmazonSensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in SENSORS
for serial_num in coordinator.data
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
)
class AmazonSensorEntity(AmazonEntity, SensorEntity):
"""Sensor device."""
entity_description: AmazonSensorEntityDescription
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement of the sensor."""
if self.entity_description.native_unit_of_measurement_fn:
return self.entity_description.native_unit_of_measurement_fn(
self.device, self.entity_description.key
)
return super().native_unit_of_measurement
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value
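
A minimal sketch of how native_unit_of_measurement_fn above resolves the unit per device; the device object is again a hypothetical stand-in, not part of the diff:

from types import SimpleNamespace

# Hypothetical device whose temperature sensor reports a Fahrenheit scale.
device = SimpleNamespace(
    sensors={"temperature": SimpleNamespace(scale="FAHRENHEIT", value="72")}
)
temperature_desc = SENSORS[0]  # the temperature description defined above
if temperature_desc.native_unit_of_measurement_fn:
    unit = temperature_desc.native_unit_of_measurement_fn(device, "temperature")
    # unit == UnitOfTemperature.FAHRENHEIT here; CELSIUS when scale == "CELSIUS"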

View File

@ -1,121 +0,0 @@
"""Support for services."""
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from .const import DOMAIN
from .coordinator import AmazonConfigEntry
ATTR_TEXT_COMMAND = "text_command"
ATTR_SOUND = "sound"
ATTR_SOUND_VARIANT = "sound_variant"
SERVICE_TEXT_COMMAND = "send_text_command"
SERVICE_SOUND_NOTIFICATION = "send_sound"
SCHEMA_SOUND_SERVICE = vol.Schema(
{
vol.Required(ATTR_SOUND): cv.string,
vol.Required(ATTR_SOUND_VARIANT): cv.positive_int,
vol.Required(ATTR_DEVICE_ID): cv.string,
},
)
SCHEMA_CUSTOM_COMMAND = vol.Schema(
{
vol.Required(ATTR_TEXT_COMMAND): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.string,
}
)
@callback
def async_get_entry_id_for_service_call(
call: ServiceCall,
) -> tuple[dr.DeviceEntry, AmazonConfigEntry]:
"""Get the entry ID related to a service call (by device ID)."""
device_registry = dr.async_get(call.hass)
device_id = call.data[ATTR_DEVICE_ID]
if (device_entry := device_registry.async_get(device_id)) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_device_id",
translation_placeholders={"device_id": device_id},
)
for entry_id in device_entry.config_entries:
if (entry := call.hass.config_entries.async_get_entry(entry_id)) is None:
continue
if entry.domain == DOMAIN:
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
translation_placeholders={"entry": entry.title},
)
return (device_entry, entry)
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_found",
translation_placeholders={"device_id": device_id},
)
async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
"""Execute action on the device."""
device, config_entry = async_get_entry_id_for_service_call(call)
assert device.serial_number
value: str = call.data[attribute]
coordinator = config_entry.runtime_data
if attribute == ATTR_SOUND:
variant: int = call.data[ATTR_SOUND_VARIANT]
pad = "_" if variant > 10 else "_0"
file = f"{value}{pad}{variant!s}"
if value not in SOUNDS_LIST or variant > SOUNDS_LIST[value]:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_sound_value",
translation_placeholders={"sound": value, "variant": str(variant)},
)
await coordinator.api.call_alexa_sound(
coordinator.data[device.serial_number], file
)
elif attribute == ATTR_TEXT_COMMAND:
await coordinator.api.call_alexa_text_command(
coordinator.data[device.serial_number], value
)
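
A worked example of the padding logic above, using illustrative values only (the sound keys appear in the services.yaml options list further down):

value, variant = "amzn_sfx_doorbell_chime", 1
pad = "_" if variant > 10 else "_0"
assert f"{value}{pad}{variant!s}" == "amzn_sfx_doorbell_chime_01"  # single digits are zero-padded

value, variant = "air_horn", 11
pad = "_" if variant > 10 else "_0"
assert f"{value}{pad}{variant!s}" == "air_horn_11"  # variants above 10 are not padded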
async def async_send_sound_notification(call: ServiceCall) -> None:
"""Send a sound notification to a AmazonDevice."""
await _async_execute_action(call, ATTR_SOUND)
async def async_send_text_command(call: ServiceCall) -> None:
"""Send a custom command to a AmazonDevice."""
await _async_execute_action(call, ATTR_TEXT_COMMAND)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amazon Devices integration."""
for service_name, method, schema in (
(
SERVICE_SOUND_NOTIFICATION,
async_send_sound_notification,
SCHEMA_SOUND_SERVICE,
),
(
SERVICE_TEXT_COMMAND,
async_send_text_command,
SCHEMA_CUSTOM_COMMAND,
),
):
hass.services.async_register(DOMAIN, service_name, method, schema=schema)
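
A hedged usage sketch (not part of the diff): once registered, the sound service can be invoked like any other Home Assistant service from an async context; the device ID below is a placeholder, not a real registry entry:

# Hypothetical service call; "abc123" stands in for a real device registry ID.
await hass.services.async_call(
    DOMAIN,
    SERVICE_SOUND_NOTIFICATION,
    {
        ATTR_DEVICE_ID: "abc123",
        ATTR_SOUND: "amzn_sfx_doorbell_chime",
        ATTR_SOUND_VARIANT: 1,
    },
    blocking=True,
)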

View File

@ -1,504 +0,0 @@
send_text_command:
fields:
device_id:
required: true
selector:
device:
integration: alexa_devices
text_command:
required: true
example: "Play B.B.C. on TuneIn"
selector:
text:
send_sound:
fields:
device_id:
required: true
selector:
device:
integration: alexa_devices
sound_variant:
required: true
example: 1
default: 1
selector:
number:
min: 1
max: 50
sound:
required: true
example: amzn_sfx_doorbell_chime
default: amzn_sfx_doorbell_chime
selector:
select:
options:
- air_horn
- air_horns
- airboat
- airport
- aliens
- amzn_sfx_airplane_takeoff_whoosh
- amzn_sfx_army_march_clank_7x
- amzn_sfx_army_march_large_8x
- amzn_sfx_army_march_small_8x
- amzn_sfx_baby_big_cry
- amzn_sfx_baby_cry
- amzn_sfx_baby_fuss
- amzn_sfx_battle_group_clanks
- amzn_sfx_battle_man_grunts
- amzn_sfx_battle_men_grunts
- amzn_sfx_battle_men_horses
- amzn_sfx_battle_noisy_clanks
- amzn_sfx_battle_yells_men
- amzn_sfx_battle_yells_men_run
- amzn_sfx_bear_groan_roar
- amzn_sfx_bear_roar_grumble
- amzn_sfx_bear_roar_small
- amzn_sfx_beep_1x
- amzn_sfx_bell_med_chime
- amzn_sfx_bell_short_chime
- amzn_sfx_bell_timer
- amzn_sfx_bicycle_bell_ring
- amzn_sfx_bird_chickadee_chirp_1x
- amzn_sfx_bird_chickadee_chirps
- amzn_sfx_bird_forest
- amzn_sfx_bird_forest_short
- amzn_sfx_bird_robin_chirp_1x
- amzn_sfx_boing_long_1x
- amzn_sfx_boing_med_1x
- amzn_sfx_boing_short_1x
- amzn_sfx_bus_drive_past
- amzn_sfx_buzz_electronic
- amzn_sfx_buzzer_loud_alarm
- amzn_sfx_buzzer_small
- amzn_sfx_car_accelerate
- amzn_sfx_car_accelerate_noisy
- amzn_sfx_car_click_seatbelt
- amzn_sfx_car_close_door_1x
- amzn_sfx_car_drive_past
- amzn_sfx_car_honk_1x
- amzn_sfx_car_honk_2x
- amzn_sfx_car_honk_3x
- amzn_sfx_car_honk_long_1x
- amzn_sfx_car_into_driveway
- amzn_sfx_car_into_driveway_fast
- amzn_sfx_car_slam_door_1x
- amzn_sfx_car_undo_seatbelt
- amzn_sfx_cat_angry_meow_1x
- amzn_sfx_cat_angry_screech_1x
- amzn_sfx_cat_long_meow_1x
- amzn_sfx_cat_meow_1x
- amzn_sfx_cat_purr
- amzn_sfx_cat_purr_meow
- amzn_sfx_chicken_cluck
- amzn_sfx_church_bell_1x
- amzn_sfx_church_bells_ringing
- amzn_sfx_clear_throat_ahem
- amzn_sfx_clock_ticking
- amzn_sfx_clock_ticking_long
- amzn_sfx_copy_machine
- amzn_sfx_cough
- amzn_sfx_crow_caw_1x
- amzn_sfx_crowd_applause
- amzn_sfx_crowd_bar
- amzn_sfx_crowd_bar_rowdy
- amzn_sfx_crowd_boo
- amzn_sfx_crowd_cheer_med
- amzn_sfx_crowd_excited_cheer
- amzn_sfx_dog_med_bark_1x
- amzn_sfx_dog_med_bark_2x
- amzn_sfx_dog_med_bark_growl
- amzn_sfx_dog_med_growl_1x
- amzn_sfx_dog_med_woof_1x
- amzn_sfx_dog_small_bark_2x
- amzn_sfx_door_open
- amzn_sfx_door_shut
- amzn_sfx_doorbell
- amzn_sfx_doorbell_buzz
- amzn_sfx_doorbell_chime
- amzn_sfx_drinking_slurp
- amzn_sfx_drum_and_cymbal
- amzn_sfx_drum_comedy
- amzn_sfx_earthquake_rumble
- amzn_sfx_electric_guitar
- amzn_sfx_electronic_beep
- amzn_sfx_electronic_major_chord
- amzn_sfx_elephant
- amzn_sfx_elevator_bell_1x
- amzn_sfx_elevator_open_bell
- amzn_sfx_fairy_melodic_chimes
- amzn_sfx_fairy_sparkle_chimes
- amzn_sfx_faucet_drip
- amzn_sfx_faucet_running
- amzn_sfx_fireplace_crackle
- amzn_sfx_fireworks
- amzn_sfx_fireworks_firecrackers
- amzn_sfx_fireworks_launch
- amzn_sfx_fireworks_whistles
- amzn_sfx_food_frying
- amzn_sfx_footsteps
- amzn_sfx_footsteps_muffled
- amzn_sfx_ghost_spooky
- amzn_sfx_glass_on_table
- amzn_sfx_glasses_clink
- amzn_sfx_horse_gallop_4x
- amzn_sfx_horse_huff_whinny
- amzn_sfx_horse_neigh
- amzn_sfx_horse_neigh_low
- amzn_sfx_horse_whinny
- amzn_sfx_human_walking
- amzn_sfx_jar_on_table_1x
- amzn_sfx_kitchen_ambience
- amzn_sfx_large_crowd_cheer
- amzn_sfx_large_fire_crackling
- amzn_sfx_laughter
- amzn_sfx_laughter_giggle
- amzn_sfx_lightning_strike
- amzn_sfx_lion_roar
- amzn_sfx_magic_blast_1x
- amzn_sfx_monkey_calls_3x
- amzn_sfx_monkey_chimp
- amzn_sfx_monkeys_chatter
- amzn_sfx_motorcycle_accelerate
- amzn_sfx_motorcycle_engine_idle
- amzn_sfx_motorcycle_engine_rev
- amzn_sfx_musical_drone_intro
- amzn_sfx_oars_splashing_rowboat
- amzn_sfx_object_on_table_2x
- amzn_sfx_ocean_wave_1x
- amzn_sfx_ocean_wave_on_rocks_1x
- amzn_sfx_ocean_wave_surf
- amzn_sfx_people_walking
- amzn_sfx_person_running
- amzn_sfx_piano_note_1x
- amzn_sfx_punch
- amzn_sfx_rain
- amzn_sfx_rain_on_roof
- amzn_sfx_rain_thunder
- amzn_sfx_rat_squeak_2x
- amzn_sfx_rat_squeaks
- amzn_sfx_raven_caw_1x
- amzn_sfx_raven_caw_2x
- amzn_sfx_restaurant_ambience
- amzn_sfx_rooster_crow
- amzn_sfx_scifi_air_escaping
- amzn_sfx_scifi_alarm
- amzn_sfx_scifi_alien_voice
- amzn_sfx_scifi_boots_walking
- amzn_sfx_scifi_close_large_explosion
- amzn_sfx_scifi_door_open
- amzn_sfx_scifi_engines_on
- amzn_sfx_scifi_engines_on_large
- amzn_sfx_scifi_engines_on_short_burst
- amzn_sfx_scifi_explosion
- amzn_sfx_scifi_explosion_2x
- amzn_sfx_scifi_incoming_explosion
- amzn_sfx_scifi_laser_gun_battle
- amzn_sfx_scifi_laser_gun_fires
- amzn_sfx_scifi_laser_gun_fires_large
- amzn_sfx_scifi_long_explosion_1x
- amzn_sfx_scifi_missile
- amzn_sfx_scifi_motor_short_1x
- amzn_sfx_scifi_open_airlock
- amzn_sfx_scifi_radar_high_ping
- amzn_sfx_scifi_radar_low
- amzn_sfx_scifi_radar_medium
- amzn_sfx_scifi_run_away
- amzn_sfx_scifi_sheilds_up
- amzn_sfx_scifi_short_low_explosion
- amzn_sfx_scifi_small_whoosh_flyby
- amzn_sfx_scifi_small_zoom_flyby
- amzn_sfx_scifi_sonar_ping_3x
- amzn_sfx_scifi_sonar_ping_4x
- amzn_sfx_scifi_spaceship_flyby
- amzn_sfx_scifi_timer_beep
- amzn_sfx_scifi_zap_backwards
- amzn_sfx_scifi_zap_electric
- amzn_sfx_sheep_baa
- amzn_sfx_sheep_bleat
- amzn_sfx_silverware_clank
- amzn_sfx_sirens
- amzn_sfx_sleigh_bells
- amzn_sfx_small_stream
- amzn_sfx_sneeze
- amzn_sfx_stream
- amzn_sfx_strong_wind_desert
- amzn_sfx_strong_wind_whistling
- amzn_sfx_subway_leaving
- amzn_sfx_subway_passing
- amzn_sfx_subway_stopping
- amzn_sfx_swoosh_cartoon_fast
- amzn_sfx_swoosh_fast_1x
- amzn_sfx_swoosh_fast_6x
- amzn_sfx_test_tone
- amzn_sfx_thunder_rumble
- amzn_sfx_toilet_flush
- amzn_sfx_trumpet_bugle
- amzn_sfx_turkey_gobbling
- amzn_sfx_typing_medium
- amzn_sfx_typing_short
- amzn_sfx_typing_typewriter
- amzn_sfx_vacuum_off
- amzn_sfx_vacuum_on
- amzn_sfx_walking_in_mud
- amzn_sfx_walking_in_snow
- amzn_sfx_walking_on_grass
- amzn_sfx_water_dripping
- amzn_sfx_water_droplets
- amzn_sfx_wind_strong_gusting
- amzn_sfx_wind_whistling_desert
- amzn_sfx_wings_flap_4x
- amzn_sfx_wings_flap_fast
- amzn_sfx_wolf_howl
- amzn_sfx_wolf_young_howl
- amzn_sfx_wooden_door
- amzn_sfx_wooden_door_creaks_long
- amzn_sfx_wooden_door_creaks_multiple
- amzn_sfx_wooden_door_creaks_open
- amzn_ui_sfx_gameshow_bridge
- amzn_ui_sfx_gameshow_countdown_loop_32s_full
- amzn_ui_sfx_gameshow_countdown_loop_64s_full
- amzn_ui_sfx_gameshow_countdown_loop_64s_minimal
- amzn_ui_sfx_gameshow_intro
- amzn_ui_sfx_gameshow_negative_response
- amzn_ui_sfx_gameshow_neutral_response
- amzn_ui_sfx_gameshow_outro
- amzn_ui_sfx_gameshow_player1
- amzn_ui_sfx_gameshow_player2
- amzn_ui_sfx_gameshow_player3
- amzn_ui_sfx_gameshow_player4
- amzn_ui_sfx_gameshow_positive_response
- amzn_ui_sfx_gameshow_tally_negative
- amzn_ui_sfx_gameshow_tally_positive
- amzn_ui_sfx_gameshow_waiting_loop_30s
- anchor
- answering_machines
- arcs_sparks
- arrows_bows
- baby
- back_up_beeps
- bars_restaurants
- baseball
- basketball
- battles
- beeps_tones
- bell
- bikes
- billiards
- board_games
- body
- boing
- books
- bow_wash
- box
- break_shatter_smash
- breaks
- brooms_mops
- bullets
- buses
- buzz
- buzz_hums
- buzzers
- buzzers_pistols
- cables_metal
- camera
- cannons
- car_alarm
- car_alarms
- car_cell_phones
- carnivals_fairs
- cars
- casino
- casinos
- cellar
- chimes
- chimes_bells
- chorus
- christmas
- church_bells
- clock
- cloth
- concrete
- construction
- construction_factory
- crashes
- crowds
- debris
- dining_kitchens
- dinosaurs
- dripping
- drops
- electric
- electrical
- elevator
- evolution_monsters
- explosions
- factory
- falls
- fax_scanner_copier
- feedback_mics
- fight
- fire
- fire_extinguisher
- fireballs
- fireworks
- fishing_pole
- flags
- football
- footsteps
- futuristic
- futuristic_ship
- gameshow
- gear
- ghosts_demons
- giant_monster
- glass
- glasses_clink
- golf
- gorilla
- grenade_lanucher
- griffen
- gyms_locker_rooms
- handgun_loading
- handgun_shot
- handle
- hands
- heartbeats_ekg
- helicopter
- high_tech
- hit_punch_slap
- hits
- horns
- horror
- hot_tub_filling_up
- human
- human_vocals
- hygene # codespell:ignore
- ice_skating
- ignitions
- infantry
- intro
- jet
- juggling
- key_lock
- kids
- knocks
- lab_equip
- lacrosse
- lamps_lanterns
- leather
- liquid_suction
- locker_doors
- machine_gun
- magic_spells
- medium_large_explosions
- metal
- modern_rings
- money_coins
- motorcycles
- movement
- moves
- nature
- oar_boat
- pagers
- paintball
- paper
- parachute
- pay_phones
- phone_beeps
- pigmy_bats
- pills
- pour_water
- power_up_down
- printers
- prison
- public_space
- racquetball
- radios_static
- rain
- rc_airplane
- rc_car
- refrigerators_freezers
- regular
- respirator
- rifle
- roller_coaster
- rollerskates_rollerblades
- room_tones
- ropes_climbing
- rotary_rings
- rowboat_canoe
- rubber
- running
- sails
- sand_gravel
- screen_doors
- screens
- seats_stools
- servos
- shoes_boots
- shotgun
- shower
- sink_faucet
- sink_filling_water
- sink_run_and_off
- sink_water_splatter
- sirens
- skateboards
- ski
- skids_tires
- sled
- slides
- small_explosions
- snow
- snowmobile
- soldiers
- splash_water
- splashes_sprays
- sports_whistles
- squeaks
- squeaky
- stairs
- steam
- submarine_diesel
- swing_doors
- switches_levers
- swords
- tape
- tape_machine
- televisions_shows
- tennis_pingpong
- textile
- throw
- thunder
- ticks
- timer
- toilet_flush
- tone
- tones_noises
- toys
- tractors
- traffic
- train
- trucks_vans
- turnstiles
- typing
- umbrella
- underwater
- vampires
- various
- video_tunes
- volcano_earthquake
- watches
- water
- water_running
- werewolves
- winches_gears
- wind
- wood
- wood_boat
- woosh
- zap
- zippers
translation_key: sound

View File

@ -1,616 +0,0 @@
{
"common": {
"data_code": "One-time password (OTP code)",
"data_description_country": "The country where your Amazon account is registered.",
"data_description_username": "The email address of your Amazon account.",
"data_description_password": "The password of your Amazon account.",
"data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported.",
"device_id_description": "The ID of the device to send the command to."
},
"config": {
"flow_title": "{username}",
"step": {
"user": {
"data": {
"country": "[%key:common::config_flow::data::country%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"country": "[%key:component::alexa_devices::common::data_description_country%]",
"username": "[%key:component::alexa_devices::common::data_description_username%]",
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
}
},
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
}
}
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": {
"speak": {
"name": "Speak"
},
"announce": {
"name": "Announce"
}
},
"switch": {
"do_not_disturb": {
"name": "Do not disturb"
}
}
},
"services": {
"send_sound": {
"name": "Send sound",
"description": "Sends a sound to a device",
"fields": {
"device_id": {
"name": "Device",
"description": "[%key:component::alexa_devices::common::device_id_description%]"
},
"sound": {
"name": "Alexa Skill sound file",
"description": "The sound file to play."
},
"sound_variant": {
"name": "Sound variant",
"description": "The variant of the sound to play."
}
}
},
"send_text_command": {
"name": "Send text command",
"description": "Sends a text command to a device",
"fields": {
"text_command": {
"name": "Alexa text command",
"description": "The text command to send."
},
"device_id": {
"name": "Device",
"description": "[%key:component::alexa_devices::common::device_id_description%]"
}
}
}
},
"selector": {
"sound": {
"options": {
"air_horn": "Air Horn",
"air_horns": "Air Horns",
"airboat": "Airboat",
"airport": "Airport",
"aliens": "Aliens",
"amzn_sfx_airplane_takeoff_whoosh": "Airplane Takeoff Whoosh",
"amzn_sfx_army_march_clank_7x": "Army March Clank 7x",
"amzn_sfx_army_march_large_8x": "Army March Large 8x",
"amzn_sfx_army_march_small_8x": "Army March Small 8x",
"amzn_sfx_baby_big_cry": "Baby Big Cry",
"amzn_sfx_baby_cry": "Baby Cry",
"amzn_sfx_baby_fuss": "Baby Fuss",
"amzn_sfx_battle_group_clanks": "Battle Group Clanks",
"amzn_sfx_battle_man_grunts": "Battle Man Grunts",
"amzn_sfx_battle_men_grunts": "Battle Men Grunts",
"amzn_sfx_battle_men_horses": "Battle Men Horses",
"amzn_sfx_battle_noisy_clanks": "Battle Noisy Clanks",
"amzn_sfx_battle_yells_men": "Battle Yells Men",
"amzn_sfx_battle_yells_men_run": "Battle Yells Men Run",
"amzn_sfx_bear_groan_roar": "Bear Groan Roar",
"amzn_sfx_bear_roar_grumble": "Bear Roar Grumble",
"amzn_sfx_bear_roar_small": "Bear Roar Small",
"amzn_sfx_beep_1x": "Beep 1x",
"amzn_sfx_bell_med_chime": "Bell Med Chime",
"amzn_sfx_bell_short_chime": "Bell Short Chime",
"amzn_sfx_bell_timer": "Bell Timer",
"amzn_sfx_bicycle_bell_ring": "Bicycle Bell Ring",
"amzn_sfx_bird_chickadee_chirp_1x": "Bird Chickadee Chirp 1x",
"amzn_sfx_bird_chickadee_chirps": "Bird Chickadee Chirps",
"amzn_sfx_bird_forest": "Bird Forest",
"amzn_sfx_bird_forest_short": "Bird Forest Short",
"amzn_sfx_bird_robin_chirp_1x": "Bird Robin Chirp 1x",
"amzn_sfx_boing_long_1x": "Boing Long 1x",
"amzn_sfx_boing_med_1x": "Boing Med 1x",
"amzn_sfx_boing_short_1x": "Boing Short 1x",
"amzn_sfx_bus_drive_past": "Bus Drive Past",
"amzn_sfx_buzz_electronic": "Buzz Electronic",
"amzn_sfx_buzzer_loud_alarm": "Buzzer Loud Alarm",
"amzn_sfx_buzzer_small": "Buzzer Small",
"amzn_sfx_car_accelerate": "Car Accelerate",
"amzn_sfx_car_accelerate_noisy": "Car Accelerate Noisy",
"amzn_sfx_car_click_seatbelt": "Car Click Seatbelt",
"amzn_sfx_car_close_door_1x": "Car Close Door 1x",
"amzn_sfx_car_drive_past": "Car Drive Past",
"amzn_sfx_car_honk_1x": "Car Honk 1x",
"amzn_sfx_car_honk_2x": "Car Honk 2x",
"amzn_sfx_car_honk_3x": "Car Honk 3x",
"amzn_sfx_car_honk_long_1x": "Car Honk Long 1x",
"amzn_sfx_car_into_driveway": "Car Into Driveway",
"amzn_sfx_car_into_driveway_fast": "Car Into Driveway Fast",
"amzn_sfx_car_slam_door_1x": "Car Slam Door 1x",
"amzn_sfx_car_undo_seatbelt": "Car Undo Seatbelt",
"amzn_sfx_cat_angry_meow_1x": "Cat Angry Meow 1x",
"amzn_sfx_cat_angry_screech_1x": "Cat Angry Screech 1x",
"amzn_sfx_cat_long_meow_1x": "Cat Long Meow 1x",
"amzn_sfx_cat_meow_1x": "Cat Meow 1x",
"amzn_sfx_cat_purr": "Cat Purr",
"amzn_sfx_cat_purr_meow": "Cat Purr Meow",
"amzn_sfx_chicken_cluck": "Chicken Cluck",
"amzn_sfx_church_bell_1x": "Church Bell 1x",
"amzn_sfx_church_bells_ringing": "Church Bells Ringing",
"amzn_sfx_clear_throat_ahem": "Clear Throat Ahem",
"amzn_sfx_clock_ticking": "Clock Ticking",
"amzn_sfx_clock_ticking_long": "Clock Ticking Long",
"amzn_sfx_copy_machine": "Copy Machine",
"amzn_sfx_cough": "Cough",
"amzn_sfx_crow_caw_1x": "Crow Caw 1x",
"amzn_sfx_crowd_applause": "Crowd Applause",
"amzn_sfx_crowd_bar": "Crowd Bar",
"amzn_sfx_crowd_bar_rowdy": "Crowd Bar Rowdy",
"amzn_sfx_crowd_boo": "Crowd Boo",
"amzn_sfx_crowd_cheer_med": "Crowd Cheer Med",
"amzn_sfx_crowd_excited_cheer": "Crowd Excited Cheer",
"amzn_sfx_dog_med_bark_1x": "Dog Med Bark 1x",
"amzn_sfx_dog_med_bark_2x": "Dog Med Bark 2x",
"amzn_sfx_dog_med_bark_growl": "Dog Med Bark Growl",
"amzn_sfx_dog_med_growl_1x": "Dog Med Growl 1x",
"amzn_sfx_dog_med_woof_1x": "Dog Med Woof 1x",
"amzn_sfx_dog_small_bark_2x": "Dog Small Bark 2x",
"amzn_sfx_door_open": "Door Open",
"amzn_sfx_door_shut": "Door Shut",
"amzn_sfx_doorbell": "Doorbell",
"amzn_sfx_doorbell_buzz": "Doorbell Buzz",
"amzn_sfx_doorbell_chime": "Doorbell Chime",
"amzn_sfx_drinking_slurp": "Drinking Slurp",
"amzn_sfx_drum_and_cymbal": "Drum And Cymbal",
"amzn_sfx_drum_comedy": "Drum Comedy",
"amzn_sfx_earthquake_rumble": "Earthquake Rumble",
"amzn_sfx_electric_guitar": "Electric Guitar",
"amzn_sfx_electronic_beep": "Electronic Beep",
"amzn_sfx_electronic_major_chord": "Electronic Major Chord",
"amzn_sfx_elephant": "Elephant",
"amzn_sfx_elevator_bell_1x": "Elevator Bell 1x",
"amzn_sfx_elevator_open_bell": "Elevator Open Bell",
"amzn_sfx_fairy_melodic_chimes": "Fairy Melodic Chimes",
"amzn_sfx_fairy_sparkle_chimes": "Fairy Sparkle Chimes",
"amzn_sfx_faucet_drip": "Faucet Drip",
"amzn_sfx_faucet_running": "Faucet Running",
"amzn_sfx_fireplace_crackle": "Fireplace Crackle",
"amzn_sfx_fireworks": "Fireworks",
"amzn_sfx_fireworks_firecrackers": "Fireworks Firecrackers",
"amzn_sfx_fireworks_launch": "Fireworks Launch",
"amzn_sfx_fireworks_whistles": "Fireworks Whistles",
"amzn_sfx_food_frying": "Food Frying",
"amzn_sfx_footsteps": "Footsteps",
"amzn_sfx_footsteps_muffled": "Footsteps Muffled",
"amzn_sfx_ghost_spooky": "Ghost Spooky",
"amzn_sfx_glass_on_table": "Glass On Table",
"amzn_sfx_glasses_clink": "Glasses Clink",
"amzn_sfx_horse_gallop_4x": "Horse Gallop 4x",
"amzn_sfx_horse_huff_whinny": "Horse Huff Whinny",
"amzn_sfx_horse_neigh": "Horse Neigh",
"amzn_sfx_horse_neigh_low": "Horse Neigh Low",
"amzn_sfx_horse_whinny": "Horse Whinny",
"amzn_sfx_human_walking": "Human Walking",
"amzn_sfx_jar_on_table_1x": "Jar On Table 1x",
"amzn_sfx_kitchen_ambience": "Kitchen Ambience",
"amzn_sfx_large_crowd_cheer": "Large Crowd Cheer",
"amzn_sfx_large_fire_crackling": "Large Fire Crackling",
"amzn_sfx_laughter": "Laughter",
"amzn_sfx_laughter_giggle": "Laughter Giggle",
"amzn_sfx_lightning_strike": "Lightning Strike",
"amzn_sfx_lion_roar": "Lion Roar",
"amzn_sfx_magic_blast_1x": "Magic Blast 1x",
"amzn_sfx_monkey_calls_3x": "Monkey Calls 3x",
"amzn_sfx_monkey_chimp": "Monkey Chimp",
"amzn_sfx_monkeys_chatter": "Monkeys Chatter",
"amzn_sfx_motorcycle_accelerate": "Motorcycle Accelerate",
"amzn_sfx_motorcycle_engine_idle": "Motorcycle Engine Idle",
"amzn_sfx_motorcycle_engine_rev": "Motorcycle Engine Rev",
"amzn_sfx_musical_drone_intro": "Musical Drone Intro",
"amzn_sfx_oars_splashing_rowboat": "Oars Splashing Rowboat",
"amzn_sfx_object_on_table_2x": "Object On Table 2x",
"amzn_sfx_ocean_wave_1x": "Ocean Wave 1x",
"amzn_sfx_ocean_wave_on_rocks_1x": "Ocean Wave On Rocks 1x",
"amzn_sfx_ocean_wave_surf": "Ocean Wave Surf",
"amzn_sfx_people_walking": "People Walking",
"amzn_sfx_person_running": "Person Running",
"amzn_sfx_piano_note_1x": "Piano Note 1x",
"amzn_sfx_punch": "Punch",
"amzn_sfx_rain": "Rain",
"amzn_sfx_rain_on_roof": "Rain On Roof",
"amzn_sfx_rain_thunder": "Rain Thunder",
"amzn_sfx_rat_squeak_2x": "Rat Squeak 2x",
"amzn_sfx_rat_squeaks": "Rat Squeaks",
"amzn_sfx_raven_caw_1x": "Raven Caw 1x",
"amzn_sfx_raven_caw_2x": "Raven Caw 2x",
"amzn_sfx_restaurant_ambience": "Restaurant Ambience",
"amzn_sfx_rooster_crow": "Rooster Crow",
"amzn_sfx_scifi_air_escaping": "Scifi Air Escaping",
"amzn_sfx_scifi_alarm": "Scifi Alarm",
"amzn_sfx_scifi_alien_voice": "Scifi Alien Voice",
"amzn_sfx_scifi_boots_walking": "Scifi Boots Walking",
"amzn_sfx_scifi_close_large_explosion": "Scifi Close Large Explosion",
"amzn_sfx_scifi_door_open": "Scifi Door Open",
"amzn_sfx_scifi_engines_on": "Scifi Engines On",
"amzn_sfx_scifi_engines_on_large": "Scifi Engines On Large",
"amzn_sfx_scifi_engines_on_short_burst": "Scifi Engines On Short Burst",
"amzn_sfx_scifi_explosion": "Scifi Explosion",
"amzn_sfx_scifi_explosion_2x": "Scifi Explosion 2x",
"amzn_sfx_scifi_incoming_explosion": "Scifi Incoming Explosion",
"amzn_sfx_scifi_laser_gun_battle": "Scifi Laser Gun Battle",
"amzn_sfx_scifi_laser_gun_fires": "Scifi Laser Gun Fires",
"amzn_sfx_scifi_laser_gun_fires_large": "Scifi Laser Gun Fires Large",
"amzn_sfx_scifi_long_explosion_1x": "Scifi Long Explosion 1x",
"amzn_sfx_scifi_missile": "Scifi Missile",
"amzn_sfx_scifi_motor_short_1x": "Scifi Motor Short 1x",
"amzn_sfx_scifi_open_airlock": "Scifi Open Airlock",
"amzn_sfx_scifi_radar_high_ping": "Scifi Radar High Ping",
"amzn_sfx_scifi_radar_low": "Scifi Radar Low",
"amzn_sfx_scifi_radar_medium": "Scifi Radar Medium",
"amzn_sfx_scifi_run_away": "Scifi Run Away",
"amzn_sfx_scifi_sheilds_up": "Scifi Sheilds Up",
"amzn_sfx_scifi_short_low_explosion": "Scifi Short Low Explosion",
"amzn_sfx_scifi_small_whoosh_flyby": "Scifi Small Whoosh Flyby",
"amzn_sfx_scifi_small_zoom_flyby": "Scifi Small Zoom Flyby",
"amzn_sfx_scifi_sonar_ping_3x": "Scifi Sonar Ping 3x",
"amzn_sfx_scifi_sonar_ping_4x": "Scifi Sonar Ping 4x",
"amzn_sfx_scifi_spaceship_flyby": "Scifi Spaceship Flyby",
"amzn_sfx_scifi_timer_beep": "Scifi Timer Beep",
"amzn_sfx_scifi_zap_backwards": "Scifi Zap Backwards",
"amzn_sfx_scifi_zap_electric": "Scifi Zap Electric",
"amzn_sfx_sheep_baa": "Sheep Baa",
"amzn_sfx_sheep_bleat": "Sheep Bleat",
"amzn_sfx_silverware_clank": "Silverware Clank",
"amzn_sfx_sirens": "Sirens",
"amzn_sfx_sleigh_bells": "Sleigh Bells",
"amzn_sfx_small_stream": "Small Stream",
"amzn_sfx_sneeze": "Sneeze",
"amzn_sfx_stream": "Stream",
"amzn_sfx_strong_wind_desert": "Strong Wind Desert",
"amzn_sfx_strong_wind_whistling": "Strong Wind Whistling",
"amzn_sfx_subway_leaving": "Subway Leaving",
"amzn_sfx_subway_passing": "Subway Passing",
"amzn_sfx_subway_stopping": "Subway Stopping",
"amzn_sfx_swoosh_cartoon_fast": "Swoosh Cartoon Fast",
"amzn_sfx_swoosh_fast_1x": "Swoosh Fast 1x",
"amzn_sfx_swoosh_fast_6x": "Swoosh Fast 6x",
"amzn_sfx_test_tone": "Test Tone",
"amzn_sfx_thunder_rumble": "Thunder Rumble",
"amzn_sfx_toilet_flush": "Toilet Flush",
"amzn_sfx_trumpet_bugle": "Trumpet Bugle",
"amzn_sfx_turkey_gobbling": "Turkey Gobbling",
"amzn_sfx_typing_medium": "Typing Medium",
"amzn_sfx_typing_short": "Typing Short",
"amzn_sfx_typing_typewriter": "Typing Typewriter",
"amzn_sfx_vacuum_off": "Vacuum Off",
"amzn_sfx_vacuum_on": "Vacuum On",
"amzn_sfx_walking_in_mud": "Walking In Mud",
"amzn_sfx_walking_in_snow": "Walking In Snow",
"amzn_sfx_walking_on_grass": "Walking On Grass",
"amzn_sfx_water_dripping": "Water Dripping",
"amzn_sfx_water_droplets": "Water Droplets",
"amzn_sfx_wind_strong_gusting": "Wind Strong Gusting",
"amzn_sfx_wind_whistling_desert": "Wind Whistling Desert",
"amzn_sfx_wings_flap_4x": "Wings Flap 4x",
"amzn_sfx_wings_flap_fast": "Wings Flap Fast",
"amzn_sfx_wolf_howl": "Wolf Howl",
"amzn_sfx_wolf_young_howl": "Wolf Young Howl",
"amzn_sfx_wooden_door": "Wooden Door",
"amzn_sfx_wooden_door_creaks_long": "Wooden Door Creaks Long",
"amzn_sfx_wooden_door_creaks_multiple": "Wooden Door Creaks Multiple",
"amzn_sfx_wooden_door_creaks_open": "Wooden Door Creaks Open",
"amzn_ui_sfx_gameshow_bridge": "Gameshow Bridge",
"amzn_ui_sfx_gameshow_countdown_loop_32s_full": "Gameshow Countdown Loop 32s Full",
"amzn_ui_sfx_gameshow_countdown_loop_64s_full": "Gameshow Countdown Loop 64s Full",
"amzn_ui_sfx_gameshow_countdown_loop_64s_minimal": "Gameshow Countdown Loop 64s Minimal",
"amzn_ui_sfx_gameshow_intro": "Gameshow Intro",
"amzn_ui_sfx_gameshow_negative_response": "Gameshow Negative Response",
"amzn_ui_sfx_gameshow_neutral_response": "Gameshow Neutral Response",
"amzn_ui_sfx_gameshow_outro": "Gameshow Outro",
"amzn_ui_sfx_gameshow_player1": "Gameshow Player1",
"amzn_ui_sfx_gameshow_player2": "Gameshow Player2",
"amzn_ui_sfx_gameshow_player3": "Gameshow Player3",
"amzn_ui_sfx_gameshow_player4": "Gameshow Player4",
"amzn_ui_sfx_gameshow_positive_response": "Gameshow Positive Response",
"amzn_ui_sfx_gameshow_tally_negative": "Gameshow Tally Negative",
"amzn_ui_sfx_gameshow_tally_positive": "Gameshow Tally Positive",
"amzn_ui_sfx_gameshow_waiting_loop_30s": "Gameshow Waiting Loop 30s",
"anchor": "Anchor",
"answering_machines": "Answering Machines",
"arcs_sparks": "Arcs Sparks",
"arrows_bows": "Arrows Bows",
"baby": "Baby",
"back_up_beeps": "Back Up Beeps",
"bars_restaurants": "Bars Restaurants",
"baseball": "Baseball",
"basketball": "Basketball",
"battles": "Battles",
"beeps_tones": "Beeps Tones",
"bell": "Bell",
"bikes": "Bikes",
"billiards": "Billiards",
"board_games": "Board Games",
"body": "Body",
"boing": "Boing",
"books": "Books",
"bow_wash": "Bow Wash",
"box": "Box",
"break_shatter_smash": "Break Shatter Smash",
"breaks": "Breaks",
"brooms_mops": "Brooms Mops",
"bullets": "Bullets",
"buses": "Buses",
"buzz": "Buzz",
"buzz_hums": "Buzz Hums",
"buzzers": "Buzzers",
"buzzers_pistols": "Buzzers Pistols",
"cables_metal": "Cables Metal",
"camera": "Camera",
"cannons": "Cannons",
"car_alarm": "Car Alarm",
"car_alarms": "Car Alarms",
"car_cell_phones": "Car Cell Phones",
"carnivals_fairs": "Carnivals Fairs",
"cars": "Cars",
"casino": "Casino",
"casinos": "Casinos",
"cellar": "Cellar",
"chimes": "Chimes",
"chimes_bells": "Chimes Bells",
"chorus": "Chorus",
"christmas": "Christmas",
"church_bells": "Church Bells",
"clock": "Clock",
"cloth": "Cloth",
"concrete": "Concrete",
"construction": "Construction",
"construction_factory": "Construction Factory",
"crashes": "Crashes",
"crowds": "Crowds",
"debris": "Debris",
"dining_kitchens": "Dining Kitchens",
"dinosaurs": "Dinosaurs",
"dripping": "Dripping",
"drops": "Drops",
"electric": "Electric",
"electrical": "Electrical",
"elevator": "Elevator",
"evolution_monsters": "Evolution Monsters",
"explosions": "Explosions",
"factory": "Factory",
"falls": "Falls",
"fax_scanner_copier": "Fax Scanner Copier",
"feedback_mics": "Feedback Mics",
"fight": "Fight",
"fire": "Fire",
"fire_extinguisher": "Fire Extinguisher",
"fireballs": "Fireballs",
"fireworks": "Fireworks",
"fishing_pole": "Fishing Pole",
"flags": "Flags",
"football": "Football",
"footsteps": "Footsteps",
"futuristic": "Futuristic",
"futuristic_ship": "Futuristic Ship",
"gameshow": "Gameshow",
"gear": "Gear",
"ghosts_demons": "Ghosts Demons",
"giant_monster": "Giant Monster",
"glass": "Glass",
"glasses_clink": "Glasses Clink",
"golf": "Golf",
"gorilla": "Gorilla",
"grenade_lanucher": "Grenade Lanucher",
"griffen": "Griffen",
"gyms_locker_rooms": "Gyms Locker Rooms",
"handgun_loading": "Handgun Loading",
"handgun_shot": "Handgun Shot",
"handle": "Handle",
"hands": "Hands",
"heartbeats_ekg": "Heartbeats EKG",
"helicopter": "Helicopter",
"high_tech": "High Tech",
"hit_punch_slap": "Hit Punch Slap",
"hits": "Hits",
"horns": "Horns",
"horror": "Horror",
"hot_tub_filling_up": "Hot Tub Filling Up",
"human": "Human",
"human_vocals": "Human Vocals",
"hygene": "Hygene",
"ice_skating": "Ice Skating",
"ignitions": "Ignitions",
"infantry": "Infantry",
"intro": "Intro",
"jet": "Jet",
"juggling": "Juggling",
"key_lock": "Key Lock",
"kids": "Kids",
"knocks": "Knocks",
"lab_equip": "Lab Equip",
"lacrosse": "Lacrosse",
"lamps_lanterns": "Lamps Lanterns",
"leather": "Leather",
"liquid_suction": "Liquid Suction",
"locker_doors": "Locker Doors",
"machine_gun": "Machine Gun",
"magic_spells": "Magic Spells",
"medium_large_explosions": "Medium Large Explosions",
"metal": "Metal",
"modern_rings": "Modern Rings",
"money_coins": "Money Coins",
"motorcycles": "Motorcycles",
"movement": "Movement",
"moves": "Moves",
"nature": "Nature",
"oar_boat": "Oar Boat",
"pagers": "Pagers",
"paintball": "Paintball",
"paper": "Paper",
"parachute": "Parachute",
"pay_phones": "Pay Phones",
"phone_beeps": "Phone Beeps",
"pigmy_bats": "Pigmy Bats",
"pills": "Pills",
"pour_water": "Pour Water",
"power_up_down": "Power Up Down",
"printers": "Printers",
"prison": "Prison",
"public_space": "Public Space",
"racquetball": "Racquetball",
"radios_static": "Radios Static",
"rain": "Rain",
"rc_airplane": "RC Airplane",
"rc_car": "RC Car",
"refrigerators_freezers": "Refrigerators Freezers",
"regular": "Regular",
"respirator": "Respirator",
"rifle": "Rifle",
"roller_coaster": "Roller Coaster",
"rollerskates_rollerblades": "RollerSkates RollerBlades",
"room_tones": "Room Tones",
"ropes_climbing": "Ropes Climbing",
"rotary_rings": "Rotary Rings",
"rowboat_canoe": "Rowboat Canoe",
"rubber": "Rubber",
"running": "Running",
"sails": "Sails",
"sand_gravel": "Sand Gravel",
"screen_doors": "Screen Doors",
"screens": "Screens",
"seats_stools": "Seats Stools",
"servos": "Servos",
"shoes_boots": "Shoes Boots",
"shotgun": "Shotgun",
"shower": "Shower",
"sink_faucet": "Sink Faucet",
"sink_filling_water": "Sink Filling Water",
"sink_run_and_off": "Sink Run And Off",
"sink_water_splatter": "Sink Water Splatter",
"sirens": "Sirens",
"skateboards": "Skateboards",
"ski": "Ski",
"skids_tires": "Skids Tires",
"sled": "Sled",
"slides": "Slides",
"small_explosions": "Small Explosions",
"snow": "Snow",
"snowmobile": "Snowmobile",
"soldiers": "Soldiers",
"splash_water": "Splash Water",
"splashes_sprays": "Splashes Sprays",
"sports_whistles": "Sports Whistles",
"squeaks": "Squeaks",
"squeaky": "Squeaky",
"stairs": "Stairs",
"steam": "Steam",
"submarine_diesel": "Submarine Diesel",
"swing_doors": "Swing Doors",
"switches_levers": "Switches Levers",
"swords": "Swords",
"tape": "Tape",
"tape_machine": "Tape Machine",
"televisions_shows": "Televisions Shows",
"tennis_pingpong": "Tennis PingPong",
"textile": "Textile",
"throw": "Throw",
"thunder": "Thunder",
"ticks": "Ticks",
"timer": "Timer",
"toilet_flush": "Toilet Flush",
"tone": "Tone",
"tones_noises": "Tones Noises",
"toys": "Toys",
"tractors": "Tractors",
"traffic": "Traffic",
"train": "Train",
"trucks_vans": "Trucks Vans",
"turnstiles": "Turnstiles",
"typing": "Typing",
"umbrella": "Umbrella",
"underwater": "Underwater",
"vampires": "Vampires",
"various": "Various",
"video_tunes": "Video Tunes",
"volcano_earthquake": "Volcano Earthquake",
"watches": "Watches",
"water": "Water",
"water_running": "Water Running",
"werewolves": "Werewolves",
"winches_gears": "Winches Gears",
"wind": "Wind",
"wood": "Wood",
"wood_boat": "Wood Boat",
"woosh": "Woosh",
"zap": "Zap",
"zippers": "Zippers"
}
}
},
"exceptions": {
"cannot_connect_with_error": {
"message": "Error connecting: {error}"
},
"cannot_retrieve_data_with_error": {
"message": "Error retrieving data: {error}"
},
"device_serial_number_missing": {
"message": "Device serial number missing: {device_id}"
},
"invalid_device_id": {
"message": "Invalid device ID specified: {device_id}"
},
"invalid_sound_value": {
"message": "Invalid sound {sound} with variant {variant} specified"
},
"entry_not_loaded": {
"message": "Entry not loaded: {entry}"
}
}
}

View File

@ -1,40 +0,0 @@
"""Utils for Alexa Devices."""
from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
from .entity import AmazonEntity
def alexa_api_call[_T: AmazonEntity, **_P](
func: Callable[Concatenate[_T, _P], Awaitable[None]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
"""Catch Alexa API call exceptions."""
@wraps(func)
async def cmd_wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap all command methods."""
try:
await func(self, *args, **kwargs)
except CannotConnect as err:
self.coordinator.last_update_success = False
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="cannot_connect_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotRetrieveData as err:
self.coordinator.last_update_success = False
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
return cmd_wrapper
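
A usage sketch for the decorator above (the class below is hypothetical, not from the removed file): any CannotConnect or CannotRetrieveData raised inside the wrapped coroutine is re-raised as HomeAssistantError and marks the coordinator as failed.

from types import SimpleNamespace

class _FakeEntity:
    """Hypothetical stand-in with just enough surface for the decorator."""

    coordinator = SimpleNamespace(last_update_success=True)

    @alexa_api_call
    async def async_do_something(self) -> None:
        raise CannotConnect("simulated failure")

# Awaiting _FakeEntity().async_do_something() raises HomeAssistantError with
# translation_key "cannot_connect_with_error" and flips last_update_success to False.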

View File

@ -1,27 +0,0 @@
"""The Altruist integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import AltruistConfigEntry, AltruistDataUpdateCoordinator
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: AltruistConfigEntry) -> bool:
"""Set up Altruist from a config entry."""
coordinator = AltruistDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AltruistConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -1,107 +0,0 @@
"""Config flow for the Altruist integration."""
import logging
from typing import Any
from altruistclient import AltruistClient, AltruistDeviceModel, AltruistError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import CONF_HOST, DOMAIN
_LOGGER = logging.getLogger(__name__)
class AltruistConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Altruist."""
device: AltruistDeviceModel
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
ip_address = ""
if user_input is not None:
ip_address = user_input[CONF_HOST]
try:
client = await AltruistClient.from_ip_address(
async_get_clientsession(self.hass), ip_address
)
except AltruistError:
errors["base"] = "no_device_found"
else:
self.device = client.device
await self.async_set_unique_id(
client.device_id, raise_on_progress=False
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=self.device.id,
data={
CONF_HOST: ip_address,
},
)
data_schema = self.add_suggested_values_to_schema(
vol.Schema({vol.Required(CONF_HOST): str}),
{CONF_HOST: ip_address},
)
return self.async_show_form(
step_id="user",
data_schema=data_schema,
errors=errors,
description_placeholders={
"ip_address": ip_address,
},
)
async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle zeroconf discovery."""
_LOGGER.debug("Zeroconf discovery: %s", discovery_info)
try:
client = await AltruistClient.from_ip_address(
async_get_clientsession(self.hass), str(discovery_info.ip_address)
)
except AltruistError:
return self.async_abort(reason="no_device_found")
self.device = client.device
_LOGGER.debug("Zeroconf device: %s", client.device)
await self.async_set_unique_id(client.device_id)
self._abort_if_unique_id_configured()
self.context.update(
{
"title_placeholders": {
"name": self.device.id,
}
}
)
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
if user_input is not None:
return self.async_create_entry(
title=self.device.id,
data={
CONF_HOST: self.device.ip_address,
},
)
self._set_confirm_only()
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={
"model": self.device.id,
},
)

View File

@ -1,5 +0,0 @@
"""Constants for the Altruist integration."""
DOMAIN = "altruist"
CONF_HOST = "host"

View File

@ -1,64 +0,0 @@
"""Coordinator module for Altruist integration in Home Assistant.
This module defines the AltruistDataUpdateCoordinator class, which manages
data updates for Altruist sensors using the AltruistClient.
"""
from datetime import timedelta
import logging
from altruistclient import AltruistClient, AltruistError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_HOST
_LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = timedelta(seconds=15)
type AltruistConfigEntry = ConfigEntry[AltruistDataUpdateCoordinator]
class AltruistDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str]]):
"""Coordinates data updates for Altruist sensors."""
client: AltruistClient
def __init__(
self,
hass: HomeAssistant,
config_entry: AltruistConfigEntry,
) -> None:
"""Initialize the data update coordinator for Altruist sensors."""
device_id = config_entry.unique_id
super().__init__(
hass,
logger=_LOGGER,
config_entry=config_entry,
name=f"Altruist {device_id}",
update_interval=UPDATE_INTERVAL,
)
self._ip_address = config_entry.data[CONF_HOST]
async def _async_setup(self) -> None:
try:
self.client = await AltruistClient.from_ip_address(
async_get_clientsession(self.hass), self._ip_address
)
await self.client.fetch_data()
except AltruistError as e:
raise ConfigEntryNotReady("Error in Altruist setup") from e
async def _async_update_data(self) -> dict[str, str]:
try:
fetched_data = await self.client.fetch_data()
except AltruistError as ex:
raise UpdateFailed(
f"The Altruist {self.client.device_id} is unavailable: {ex}"
) from ex
return {item["value_type"]: item["value"] for item in fetched_data}
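The coordinator flattens the client's list payload into a value_type-to-value mapping. A self-contained illustration of the payload shape assumed by _async_update_data follows; the readings are invented, while the keys mirror the sensor descriptions used by the sensor platform.

# Illustration only: the payload shape assumed by _async_update_data above.
# The value_type keys mirror the sensor descriptions; the readings are invented.
fetched_data = [
    {"value_type": "BME280_temperature", "value": "23.4"},
    {"value_type": "SDS_P1", "value": "7.1"},
    {"value_type": "signal", "value": "-61"},
]
data = {item["value_type"]: item["value"] for item in fetched_data}
assert data == {"BME280_temperature": "23.4", "SDS_P1": "7.1", "signal": "-61"}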

View File

@ -1,15 +0,0 @@
{
"entity": {
"sensor": {
"pm_10": {
"default": "mdi:thought-bubble"
},
"pm_25": {
"default": "mdi:thought-bubble-outline"
},
"radiation": {
"default": "mdi:radioactive"
}
}
}
}

View File

@ -1,12 +0,0 @@
{
"domain": "altruist",
"name": "Altruist",
"codeowners": ["@airalab", "@LoSk-p"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/altruist",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["altruistclient==0.1.1"],
"zeroconf": ["_altruist._tcp.local."]
}

View File

@ -1,83 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not provide options flow.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
Device type integration
entity-category: todo
entity-device-class: done
entity-disabled-by-default: todo
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: No known use cases for repair issues or flows, yet
stale-devices:
status: exempt
comment: |
Device type integration
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@ -1,249 +0,0 @@
"""Defines the Altruist sensor platform."""
from collections.abc import Callable
from dataclasses import dataclass
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
EntityCategory,
UnitOfPressure,
UnitOfSoundPressure,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AltruistConfigEntry
from .coordinator import AltruistDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True)
class AltruistSensorEntityDescription(SensorEntityDescription):
"""Class to describe a Sensor entity."""
native_value_fn: Callable[[str], float] = float
state_class: SensorStateClass = SensorStateClass.MEASUREMENT
SENSOR_DESCRIPTIONS = [
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="BME280_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BME280_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BME280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BME280_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BMP_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BMP"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BMP_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BMP"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BMP280_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BMP280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BMP280_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BMP280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="HTU21D_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "HTU21D"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="HTU21D_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "HTU21D"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PM10,
translation_key="pm_10",
key="SDS_P1",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
suggested_display_precision=2,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PM25,
translation_key="pm_25",
key="SDS_P2",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
suggested_display_precision=2,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="SHT3X_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "SHT3X"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="SHT3X_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "SHT3X"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
key="signal",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=0,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.SOUND_PRESSURE,
key="PCBA_noiseMax",
translation_key="noise_max",
native_unit_of_measurement=UnitOfSoundPressure.DECIBEL,
suggested_display_precision=0,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.SOUND_PRESSURE,
key="PCBA_noiseAvg",
translation_key="noise_avg",
native_unit_of_measurement=UnitOfSoundPressure.DECIBEL,
suggested_display_precision=0,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
translation_key="co2",
key="CCS_CO2",
suggested_display_precision=2,
translation_placeholders={"sensor_name": "CCS"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
key="CCS_TVOC",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
suggested_display_precision=2,
),
AltruistSensorEntityDescription(
key="GC",
native_unit_of_measurement="μR/h",
translation_key="radiation",
suggested_display_precision=2,
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.CO2,
translation_key="co2",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
key="SCD4x_co2",
suggested_display_precision=2,
translation_placeholders={"sensor_name": "SCD4x"},
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AltruistConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add sensors for passed config_entry in HA."""
coordinator = config_entry.runtime_data
async_add_entities(
AltruistSensor(coordinator, sensor_description)
for sensor_description in SENSOR_DESCRIPTIONS
if sensor_description.key in coordinator.data
)
class AltruistSensor(CoordinatorEntity[AltruistDataUpdateCoordinator], SensorEntity):
"""Implementation of a Altruist sensor."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AltruistDataUpdateCoordinator,
description: AltruistSensorEntityDescription,
) -> None:
"""Initialize the Altruist sensor."""
super().__init__(coordinator)
self._device = coordinator.client.device
self.entity_description: AltruistSensorEntityDescription = description
self._attr_unique_id = f"{self._device.id}-{description.key}"
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, self._device.id)},
manufacturer="Robonomics",
model="Altruist",
sw_version=self._device.fw_version,
configuration_url=f"http://{self._device.ip_address}",
serial_number=self._device.id,
)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available and self.entity_description.key in self.coordinator.data
)
@property
def native_value(self) -> float | int:
"""Return the native value of the sensor."""
string_value = self.coordinator.data[self.entity_description.key]
return self.entity_description.native_value_fn(string_value)
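If a device reported an additional value_type, the description table above could be extended with one more entry. The sketch below is hypothetical: the "EXAMPLE_counts" key and the divide-by-ten conversion are invented, and it only becomes an entity if the coordinator data actually contains that key. It mainly shows how native_value_fn customizes the string-to-number conversion.

# Hypothetical example only, not part of the integration: a new entry for
# SENSOR_DESCRIPTIONS pairing a made-up "EXAMPLE_counts" reading with a
# custom native_value_fn.
EXAMPLE_DESCRIPTION = AltruistSensorEntityDescription(
    key="EXAMPLE_counts",
    translation_key="radiation",
    native_unit_of_measurement="μR/h",
    suggested_display_precision=1,
    native_value_fn=lambda value: float(value) / 10,
)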

View File

@ -1,51 +0,0 @@
{
"config": {
"flow_title": "{name}",
"step": {
"discovery_confirm": {
"description": "Do you want to start setup {model}?"
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "Altruist IP address or hostname in the local network"
},
"description": "Fill in Altruist IP address or hostname in your local network"
}
},
"abort": {
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"no_device_found": "[%key:common::config_flow::error::cannot_connect%]"
}
},
"entity": {
"sensor": {
"humidity": {
"name": "{sensor_name} humidity"
},
"pressure": {
"name": "{sensor_name} pressure"
},
"temperature": {
"name": "{sensor_name} temperature"
},
"noise_max": {
"name": "Maximum noise"
},
"noise_avg": {
"name": "Average noise"
},
"co2": {
"name": "{sensor_name} CO2"
},
"radiation": {
"name": "Radiation level"
}
}
}
}

Some files were not shown because too many files have changed in this diff.