mirror of https://github.com/tensorchord/pgvecto.rs.git synced 2025-07-30 19:23:05 +03:00

fix: upgrade instructions (#203)

* fix: upgrade instructions

Signed-off-by: usamoi <usamoi@outlook.com>

* fix: rename scripts to install

Signed-off-by: usamoi <usamoi@outlook.com>

* fix: ipc error-handling

Signed-off-by: usamoi <usamoi@outlook.com>

* feat: soft version

Signed-off-by: usamoi <usamoi@outlook.com>

* chore: freebsd

Signed-off-by: usamoi <usamoi@outlook.com>

---------

Signed-off-by: usamoi <usamoi@outlook.com>
Authored by Usamoi on 2023-12-28 16:16:22 +08:00, committed by GitHub
Parent: 78cc08b49d
Commit: 4d34b45b23
43 changed files with 6758 additions and 310 deletions

@@ -62,15 +62,29 @@ The given vector is invalid for input.
 ADVICE: Check if dimensions and scalar type of the vector are matched with the index.\
 ")]
     Unmatched2,
+    #[error("\
+IPC connection is closed unexpectedly.
+ADVICE: The error is raised by background worker errors. \
+Please check the full PostgreSQL log to get more information.\
+")]
+    Ipc,
+    #[error("\
+The extension is upgraded. However, the index files are outdated.
+ADVICE: Please read `https://github.com/tensorchord/pgvecto.rs/blob/main/docs/upgrade.md`.\
+")]
+    Upgrade,
 }
-pub trait FriendlyErrorLike {
-    fn friendly(self) -> !;
+pub trait FriendlyErrorLike: Sized {
+    fn convert(self) -> FriendlyError;
+    fn friendly(self) -> ! {
+        panic!("pgvecto.rs: {}", self.convert());
+    }
 }
 impl FriendlyErrorLike for FriendlyError {
-    fn friendly(self) -> ! {
-        panic!("pgvecto.rs: {}", self);
+    fn convert(self) -> FriendlyError {
+        self
     }
 }
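
With this change, friendly() becomes a default method: an error type only has to say how it maps into FriendlyError, and the panic with the "pgvecto.rs: " prefix is applied in one place. A minimal sketch of how a caller-side error type could plug into the reworked trait; the ConnectionError name and its variant are illustrative assumptions, not taken from this commit:

// Sketch only: an assumed IPC-side error type mapped onto the new trait.
pub enum ConnectionError {
    // The background worker closed the socket or never answered.
    ClosedConnection,
}

impl FriendlyErrorLike for ConnectionError {
    // Implementing convert() is enough; friendly() is inherited from the
    // trait's default method and panics with the "pgvecto.rs: " prefix.
    fn convert(self) -> FriendlyError {
        match self {
            ConnectionError::ClosedConnection => FriendlyError::Ipc,
        }
    }
}

// Usage: let reply = ipc_result.unwrap_or_else(|e: ConnectionError| e.friendly());

Keeping the panic in the default method keeps the prefix and advice text consistent across every error path.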

@@ -0,0 +1,46 @@
+use serde::{Deserialize, Serialize};
+use std::error::Error;
+use std::path::Path;
+use thiserror::Error;
+#[derive(Debug, Error)]
+pub enum MetadataError {
+    #[error("Invalid version.")]
+    InvalidVersion,
+}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Metadata {
+    #[serde(default)]
+    pub version: Option<u64>,
+    #[serde(default)]
+    pub soft_version: Option<u64>,
+}
+impl Metadata {
+    const VERSION: u64 = 1;
+    const SOFT_VERSION: u64 = 1;
+}
+impl Metadata {
+    pub fn write(path: impl AsRef<Path>) {
+        let metadata = Metadata {
+            version: Some(Self::VERSION),
+            soft_version: Some(Self::SOFT_VERSION),
+        };
+        let contents = serde_json::to_string(&metadata).unwrap();
+        std::fs::write(path, contents).unwrap();
+    }
+    pub fn read(path: impl AsRef<Path>) -> Result<(), Box<dyn Error>> {
+        use MetadataError::*;
+        let contents = std::fs::read_to_string(path)?;
+        let metadata = serde_json::from_str::<Metadata>(&contents)?;
+        if Self::VERSION != metadata.version.ok_or(InvalidVersion)? {
+            return Err(Box::new(InvalidVersion));
+        }
+        if Self::SOFT_VERSION < metadata.soft_version.ok_or(InvalidVersion)? {
+            return Err(Box::new(InvalidVersion));
+        }
+        Ok(())
+    }
+}
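
The metadata file is plain JSON. read() accepts it only when version matches the running extension exactly and the stored soft_version is not newer than the extension's own SOFT_VERSION, so older data stays readable after a soft bump while data from a newer release is refused. A rough illustration of the gate, not taken from the repository's tests; the temporary path and the literal JSON payloads are assumptions:

// Illustrative sketch of the version gate; the path and payloads are made up.
fn metadata_gate_sketch() {
    let path = std::env::temp_dir().join("pg_vectors_metadata_sketch");

    // write() serializes {"version":1,"soft_version":1}; reading it back succeeds.
    Metadata::write(&path);
    assert!(Metadata::read(&path).is_ok());

    // Data written by a newer soft version must be rejected, so an older
    // binary never touches files it only partially understands.
    std::fs::write(&path, r#"{"version":1,"soft_version":9}"#).unwrap();
    assert!(Metadata::read(&path).is_err());

    // A mismatched (or missing) hard version is always rejected.
    std::fs::write(&path, r#"{"version":2,"soft_version":1}"#).unwrap();
    assert!(Metadata::read(&path).is_err());
}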

@@ -1,4 +1,5 @@
 pub mod instance;
+pub mod metadata;
 use self::instance::Instance;
 use crate::index::IndexOptions;
@@ -16,14 +17,6 @@ use std::collections::HashMap;
 use std::path::PathBuf;
 use std::sync::Arc;
-fn magic() -> &'static [u8] {
-    &[1, 4, 53, 23, 34, 92, 34, 23]
-}
-fn check(data: &[u8]) -> bool {
-    magic() == data
-}
 pub struct Worker {
     path: PathBuf,
     protect: Mutex<WorkerProtect>,
@@ -33,7 +26,6 @@ pub struct Worker {
 impl Worker {
     pub fn create(path: PathBuf) -> Arc<Self> {
         std::fs::create_dir(&path).unwrap();
-        std::fs::write(path.join("magic"), magic()).unwrap();
         std::fs::create_dir(path.join("indexes")).unwrap();
         let startup = FileAtomic::create(path.join("startup"), WorkerStartup::new());
         let indexes = HashMap::new();
@@ -42,17 +34,18 @@
         });
         let protect = WorkerProtect { startup, indexes };
         sync_dir(&path);
+        self::metadata::Metadata::write(path.join("metadata"));
         Arc::new(Worker {
             path,
             protect: Mutex::new(protect),
             view: ArcSwap::new(view),
         })
     }
+    pub fn check(path: PathBuf) -> bool {
+        self::metadata::Metadata::read(path.join("metadata")).is_ok()
+    }
     pub fn open(path: PathBuf) -> Arc<Self> {
         let startup = FileAtomic::<WorkerStartup>::open(path.join("startup"));
-        if !check(&std::fs::read(path.join("magic")).unwrap_or_default()) {
-            panic!("Please delete the directory pg_vectors in Postgresql data folder. The files are created by older versions of postgresql or broken.");
-        }
         clean(
             path.join("indexes"),
             startup.get().indexes.keys().map(|s| s.to_string()),
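
Worker::check replaces the old magic-file probe that told users to delete pg_vectors outright: the PostgreSQL-facing side can test the data directory first and, on failure, raise the new Upgrade error that points at docs/upgrade.md. A hedged sketch of how a caller might wire this up; start_worker and the existence test are assumptions, not code from this commit:

// Sketch only: choosing between create, open, and the upgrade advice.
fn start_worker(path: std::path::PathBuf) -> std::sync::Arc<Worker> {
    if path.try_exists().unwrap() {
        if !Worker::check(path.clone()) {
            // Metadata is missing or written by an incompatible release:
            // report FriendlyError::Upgrade (see the error enum above)
            // instead of the old "delete pg_vectors" panic.
            FriendlyError::Upgrade.friendly();
        }
        Worker::open(path)
    } else {
        Worker::create(path)
    }
}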