//! influxdb/server/src/config.rs — in-memory management of server database configuration.

use std::{
collections::{BTreeMap, BTreeSet},
sync::{Arc, RwLock},
};
use data_types::{
database_rules::{DatabaseRules, WriterId},
DatabaseName,
};
use mutable_buffer::MutableBufferDb;
use object_store::path::ObjectStorePath;
use read_buffer::Database as ReadBufferDb;
// This module contains code for managing the configuration of the server.
use crate::{db::Db, Error, JobRegistry, Result};
pub(crate) const DB_RULES_FILE_NAME: &str = "rules.json";
/// The Config tracks the configuration of databases and their rules along
/// with host groups for replication. It is used as an in-memory structure
/// that can be loaded incrementally from object storage.
#[derive(Debug)]
pub(crate) struct Config {
// Registry used to track background jobs for the databases this config creates.
jobs: Arc<JobRegistry>,
// Mutable state behind a lock so that `&self` methods can update it.
state: RwLock<ConfigState>,
}
impl Config {
    /// Creates a new, empty `Config` that tracks background jobs in `jobs`.
    pub(crate) fn new(jobs: Arc<JobRegistry>) -> Self {
        Self {
            state: Default::default(),
            jobs,
        }
    }

    /// Reserves `name`, builds its database from `rules`, and returns a
    /// handle that either commits the database into the config (via
    /// [`CreateDatabaseHandle::commit`]) or rolls the reservation back
    /// when dropped.
    ///
    /// # Errors
    ///
    /// Returns `Error::DatabaseAlreadyExists` if `name` is already
    /// reserved or already committed.
    pub(crate) fn create_db(
        &self,
        name: DatabaseName<'static>,
        rules: DatabaseRules,
    ) -> Result<CreateDatabaseHandle<'_>> {
        let mut state = self.state.write().expect("mutex poisoned");
        if state.reservations.contains(&name) || state.databases.contains_key(&name) {
            return Err(Error::DatabaseAlreadyExists {
                db_name: name.to_string(),
            });
        }

        // Only build a mutable buffer when the rules ask for one.
        let mutable_buffer = if rules.mutable_buffer_config.is_some() {
            Some(MutableBufferDb::new(name.to_string()))
        } else {
            None
        };

        let read_buffer = ReadBufferDb::new();
        let wal_buffer = rules.wal_buffer_config.as_ref().map(Into::into);

        let db = Arc::new(Db::new(
            rules,
            mutable_buffer,
            read_buffer,
            wal_buffer,
            Arc::clone(&self.jobs),
        ));

        state.reservations.insert(name.clone());
        Ok(CreateDatabaseHandle {
            db,
            // `self` is already `&Config`; `&self` here would create a
            // needless `&&Config` (clippy::needless_borrow) that only
            // compiles via deref coercion.
            config: self,
            name,
        })
    }

    /// Returns the committed database registered under `name`, if any.
    /// Reserved-but-uncommitted databases are not visible here.
    pub(crate) fn db(&self, name: &DatabaseName<'_>) -> Option<Arc<Db>> {
        let state = self.state.read().expect("mutex poisoned");
        state.databases.get(name).cloned()
    }

    /// Returns the names of all committed databases. The result is sorted
    /// because the backing `BTreeMap` iterates its keys in order.
    pub(crate) fn db_names_sorted(&self) -> Vec<DatabaseName<'static>> {
        let state = self.state.read().expect("mutex poisoned");
        state.databases.keys().cloned().collect()
    }

    /// Returns all known remote (writer id, connection string) pairs,
    /// sorted by writer id via the backing `BTreeMap`.
    pub(crate) fn remotes_sorted(&self) -> Vec<(WriterId, String)> {
        let state = self.state.read().expect("mutex poisoned");
        state.remotes.iter().map(|(&a, b)| (a, b.clone())).collect()
    }

    /// Inserts or replaces the connection string for remote server `id`.
    pub(crate) fn update_remote(&self, id: WriterId, addr: GRPCConnectionString) {
        let mut state = self.state.write().expect("mutex poisoned");
        state.remotes.insert(id, addr);
    }

    /// Removes remote server `id`, returning its previous connection
    /// string if one was registered.
    pub(crate) fn delete_remote(&self, id: WriterId) -> Option<GRPCConnectionString> {
        let mut state = self.state.write().expect("mutex poisoned");
        state.remotes.remove(&id)
    }

    /// Moves a reserved `name` into the committed database set. Panics if
    /// no reservation exists or the name is somehow already committed —
    /// both would be internal invariant violations.
    fn commit(&self, name: &DatabaseName<'static>, db: Arc<Db>) {
        let mut state = self.state.write().expect("mutex poisoned");
        let name = state
            .reservations
            .take(name)
            .expect("reservation doesn't exist");
        assert!(state.databases.insert(name, db).is_none())
    }

    /// Releases the reservation for `name`. Safe to call when no
    /// reservation exists (e.g. after `commit`): removal is then a no-op.
    fn rollback(&self, name: &DatabaseName<'static>) {
        let mut state = self.state.write().expect("mutex poisoned");
        state.reservations.remove(name);
    }
}
/// Builds the object-store location of a database's config file: the
/// database name is appended as a directory under `root`, and
/// `rules.json` ([`DB_RULES_FILE_NAME`]) is set as the file name.
pub fn object_store_path_for_database_config<P: ObjectStorePath>(
    root: &P,
    name: &DatabaseName<'_>,
) -> P {
    let mut location = root.clone();
    location.push_dir(name.to_string());
    location.set_file_name(DB_RULES_FILE_NAME);
    location
}
/// A gRPC connection string identifying a remote server's management API
/// endpoint (see `ConfigState::remotes`).
pub type GRPCConnectionString = String;
// In-memory configuration state, guarded by the `RwLock` inside `Config`.
#[derive(Default, Debug)]
struct ConfigState {
// Names reserved by outstanding `CreateDatabaseHandle`s that have not
// yet been committed or rolled back.
reservations: BTreeSet<DatabaseName<'static>>,
// Databases that have been committed and are visible via `Config::db`.
databases: BTreeMap<DatabaseName<'static>, Arc<Db>>,
/// Map between remote IOx server IDs and management API connection strings.
remotes: BTreeMap<WriterId, GRPCConnectionString>,
}
/// CreateDatabaseHandle is returned when a call is made to `create_db` on
/// the Config struct. The handle can be used to hold a reservation for the
/// database name. Calling `commit` on the handle will consume the struct
/// and move the database from reserved to being in the config.
///
/// The goal is to ensure that database names can be reserved with
/// minimal time holding a write lock on the config state. This allows
/// the caller (the server) to reserve the database name, persist its
/// configuration and then commit the change in-memory after it has been
/// persisted.
#[derive(Debug)]
pub(crate) struct CreateDatabaseHandle<'a> {
// The newly built database; not visible via `Config::db` until commit.
pub db: Arc<Db>,
// The reserved database name.
pub name: DatabaseName<'static>,
// The config holding the reservation; used by `commit` and `Drop`.
config: &'a Config,
}
impl<'a> CreateDatabaseHandle<'a> {
/// Consumes the handle and moves the reserved database into the
/// config's committed set.
///
/// Note: `Drop::drop` still runs when `self` goes out of scope at the
/// end of this method, but the resulting `rollback` is a harmless
/// no-op because `Config::commit` has already taken the name out of
/// the reservation set.
pub(crate) fn commit(self) {
self.config.commit(&self.name, Arc::clone(&self.db))
}
}
impl<'a> Drop for CreateDatabaseHandle<'a> {
// Releases the name reservation if the handle is dropped without being
// committed; after `commit` the name is already gone, so this is a no-op.
fn drop(&mut self) {
self.config.rollback(&self.name);
}
}
#[cfg(test)]
mod test {
    use object_store::{memory::InMemory, ObjectStore, ObjectStoreApi};

    use super::*;

    #[test]
    fn create_db() {
        let name = DatabaseName::new("foo").unwrap();
        let config = Config::new(Arc::new(JobRegistry::new()));
        let rules = DatabaseRules::new();

        {
            // While a reservation handle is alive, a second create for
            // the same name must fail.
            let _db_reservation = config.create_db(name.clone(), rules.clone()).unwrap();
            let err = config.create_db(name.clone(), rules.clone()).unwrap_err();
            assert!(matches!(err, Error::DatabaseAlreadyExists { .. }));
        }

        // The handle was dropped above, releasing the reservation, so the
        // name can now be reserved again and committed for real.
        let db_reservation = config.create_db(name.clone(), rules).unwrap();
        db_reservation.commit();

        assert!(config.db(&name).is_some());
        assert_eq!(config.db_names_sorted(), vec![name]);
    }

    #[test]
    fn object_store_path_for_database_config() {
        let store = ObjectStore::new_in_memory(InMemory::new());

        let mut prefix = store.new_path();
        prefix.push_dir("1");

        let db_name = DatabaseName::new("foo").unwrap();
        let actual = super::object_store_path_for_database_config(&prefix, &db_name);

        let mut expected = prefix;
        expected.push_dir("foo");
        expected.set_file_name("rules.json");
        assert_eq!(actual, expected);
    }
}