1
0
mirror of https://github.com/akpaevj/OneSTools.FileDatabase.git synced 2026-04-30 19:18:48 +02:00

Добавьте файлы проекта.

This commit is contained in:
Акпаев Евгений Александрович
2021-01-03 00:22:00 +03:00
parent 0d24dcf3a5
commit 3ced2c585c
21 changed files with 1393 additions and 0 deletions
+85
View File
@@ -0,0 +1,85 @@
using System;
using OneSTools.BracketsFile;
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// Describes a single column of a file-database table.
    /// </summary>
    public class Field
    {
        /// <summary>
        /// Real size of the value on disk, in bytes (includes the null-marker byte for nullable fields).
        /// </summary>
        internal int MaxSize { get; private set; }

        /// <summary>
        /// Internal name of the field.
        /// </summary>
        public string Name { get; private set; }

        /// <summary>
        /// Type of the field's value.
        /// </summary>
        public FieldType Type { get; private set; }

        /// <summary>
        /// If the flag is True a value can be null.
        /// </summary>
        public bool Nullable { get; private set; }

        /// <summary>
        /// Length of the value.
        /// </summary>
        public int Length { get; private set; }

        /// <summary>
        /// "Numeric" value only - position of a point in a value.
        /// </summary>
        public int Precision { get; private set; }

        // NOTE(review): stored as the raw string from the description node; exact semantics not visible here.
        public string CaseSensitive { get; private set; }

        /// <summary>
        /// Populates the field description from its brackets-file node and computes the physical size.
        /// </summary>
        internal void Read(BracketsNode node)
        {
            Name = node[0];
            string typeCode = node[1];
            Type = typeCode switch
            {
                "B" => FieldType.Binary,
                "L" => FieldType.Logical,
                "N" => FieldType.Numeric,
                "NC" => FieldType.NChar,
                "NVC" => FieldType.NVarChar,
                "RV" => FieldType.RowVersion,
                "NT" => FieldType.NText,
                "I" => FieldType.Image,
                "DT" => FieldType.DateTime,
                _ => throw new Exception($"{typeCode} is unknown field type"),
            };
            Nullable = node[2];
            Length = node[3];
            Precision = node[4];
            CaseSensitive = node[5];
            CalculateRealSize();
        }

        public override string ToString() => $"{Name} ({Type})";

        // Derives the physical byte size from the logical type and declared length.
        private void CalculateRealSize()
        {
            int size = Type switch
            {
                FieldType.Binary => Length,
                FieldType.Logical => 1,
                // BCD: one sign nibble plus Length digit nibbles, rounded up to whole bytes
                FieldType.Numeric => (Length + 1) / 2 + (Length + 1) % 2,
                FieldType.NChar => 2 * Length,
                FieldType.NVarChar => 2 * Length + 2,
                FieldType.RowVersion => 16,
                // NText and Image rows hold only an 8-byte reference into the blob file
                FieldType.NText => 8,
                FieldType.Image => 8,
                FieldType.DateTime => 7,
                _ => throw new NotImplementedException($"There is no alghorithm to calculate size of \"{Type}\" type"),
            };
            // a nullable value is prefixed with a one-byte null marker
            MaxSize = Nullable ? size + 1 : size;
        }
    }
}
@@ -0,0 +1,42 @@
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// Value types a table field can hold. Values are made explicit to
    /// document the on-wire ordering; they match the implicit defaults.
    /// </summary>
    public enum FieldType
    {
        /// <summary>Fixed-length binary data.</summary>
        Binary = 0,

        /// <summary>Boolean value.</summary>
        Logical = 1,

        /// <summary>Fixed point decimal.</summary>
        Numeric = 2,

        /// <summary>Fixed-length Unicode string.</summary>
        NChar = 3,

        /// <summary>Variable-length Unicode string.</summary>
        NVarChar = 4,

        /// <summary>Version of the row.</summary>
        RowVersion = 5,

        /// <summary>Unlimited-length Unicode string (UTF-16).</summary>
        NText = 6,

        /// <summary>Unlimited-length binary data.</summary>
        Image = 7,

        /// <summary>Date and time.</summary>
        DateTime = 8
    }
}
+40
View File
@@ -0,0 +1,40 @@
using OneSTools.BracketsFile;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// Describes a table index and the fields it is built over.
    /// </summary>
    public class Index
    {
        /// <summary>
        /// Internal name of the index.
        /// </summary>
        public string Name { get; private set; }

        /// <summary>
        /// Collection of the index fields.
        /// </summary>
        public ReadOnlyCollection<IndexField> Fields { get; private set; }

        /// <summary>
        /// Populates the index description from its brackets-file node.
        /// </summary>
        internal void Read(BracketsNode node)
        {
            Name = (string)node[0];
            // element 1 is skipped; field descriptions start at element 2
            var parsed = new List<IndexField>();
            for (var i = 2; i < node.Count; i++)
            {
                var indexField = new IndexField();
                indexField.Read(node[i]);
                parsed.Add(indexField);
            }
            Fields = parsed.AsReadOnly();
        }

        public override string ToString() => Name;
    }
}
@@ -0,0 +1,27 @@
using OneSTools.BracketsFile;
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// A single field participating in an index.
    /// </summary>
    public class IndexField
    {
        /// <summary>
        /// Internal name of the index field.
        /// </summary>
        public string Name { get; private set; }

        /// <summary>
        /// The length of the value.
        /// </summary>
        public int Length { get; private set; }

        /// <summary>
        /// Populates the description from its brackets-file node
        /// (element 0 is the name, element 1 the length).
        /// </summary>
        internal void Read(BracketsNode node)
        {
            Name = node[0];
            Length = node[1];
        }

        public override string ToString() => Name;
    }
}
+120
View File
@@ -0,0 +1,120 @@
using OneSTools.BracketsFile;
using System.Collections.Generic;
using System.IO;
using System;
using OneSTools.FileDatabase.LowLevel;
using System.Collections.ObjectModel;
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// Describes a single table of the file database: its fields, indexes,
    /// data-file pages and a lazy view over its rows.
    /// </summary>
    public class Table
    {
        // Maximum on-disk size of one row, in bytes (field sizes plus service bytes).
        internal int MaxRowSize { get; private set; }
        // Starting page of the fixed-size row data file.
        internal uint DataFilePage { get; private set; }
        // Starting page of the unlimited-length (NText/Image) data file.
        internal uint UnlimitedLengthDataFilePage { get; private set; }
        // Starting page of the index file.
        internal uint IndexFilePage { get; private set; }

        /// <summary>
        /// Internal name of the table.
        /// </summary>
        public string Name { get; private set; }

        /// <summary>
        /// Collection of table fields.
        /// </summary>
        public ReadOnlyCollection<Field> Fields { get; private set; }

        /// <summary>
        /// Collection of table indexes.
        /// </summary>
        public ReadOnlyCollection<Index> Indexes { get; private set; }

        public bool RecordLock { get; private set; }

        /// <summary>
        /// Collection of table rows (read lazily from the data files).
        /// </summary>
        public IReadOnlyList<object[]> Rows { get; private set; }

        /// <summary>
        /// Builds the table description from its brackets-file node.
        /// </summary>
        internal Table(FileDatabaseStream stream, BracketsNode node)
        {
            Name = node[0];
            // element 1 is skipped; description sub-nodes start at element 2.
            // NOTE(review): ReadFields consults RecordLock, so the computed MaxRowSize depends on
            // "Recordlock" appearing before "Fields" in the description - confirm the format guarantees this.
            for (var i = 2; i < node.Count; i++)
            {
                var child = node[i];
                string childName = child[0];
                switch (childName)
                {
                    case "Fields":
                        ReadFields(child);
                        break;
                    case "Indexes":
                        ReadIndexes(child);
                        break;
                    case "Recordlock":
                        RecordLock = child[1];
                        break;
                    case "Files":
                        DataFilePage = child[1];
                        UnlimitedLengthDataFilePage = child[2];
                        IndexFilePage = child[3];
                        break;
                    default:
                        throw new Exception($"{childName} is unknown table description node");
                }
            }
            Rows = new TableRows(stream, this);
        }

        // Parses field descriptions and accumulates MaxRowSize.
        private void ReadFields(BracketsNode node)
        {
            var parsed = new List<Field>();
            for (var i = 1; i < node.Count; i++)
            {
                var field = new Field();
                field.Read(node[i]);
                // the row-version field is always stored first in a row
                if (field.Type == FieldType.RowVersion)
                    parsed.Insert(0, field);
                else
                    parsed.Add(field);
                MaxRowSize += field.MaxSize;
            }
            // add "free row" mark length
            MaxRowSize++;
            // add "short version" data length
            if (parsed.Count > 0 && parsed[0].Type == FieldType.RowVersion && RecordLock)
                MaxRowSize += 8;
            Fields = parsed.AsReadOnly();
        }

        // Parses index descriptions; yields an empty collection when none are present.
        private void ReadIndexes(BracketsNode node)
        {
            var parsed = new List<Index>();
            for (var i = 1; i < node.Count; i++)
            {
                var index = new Index();
                index.Read(node[i]);
                parsed.Add(index);
            }
            Indexes = parsed.AsReadOnly();
        }

        public override string ToString() => Name;
    }
}
@@ -0,0 +1,342 @@
using System.Collections.Generic;
using System.Text;
using System;
using OneSTools.FileDatabase.LowLevel;
using OneSTools.FileDatabase.LowLevel.Files;
using System.Buffers.Binary;
using System.Collections;
using System.Collections.ObjectModel;
using System.Globalization;
namespace OneSTools.FileDatabase.HighLevel
{
    /// <summary>
    /// Lazy, read-only view over the rows of a table. Rows are decoded on demand
    /// from the table's data file; unlimited-length values (NText/Image) are
    /// resolved through the separate unlimited-length data file.
    /// Not thread-safe: decoding repositions the shared stream.
    /// </summary>
    internal class TableRows : IReadOnlyList<object[]>
    {
        private readonly FileDatabaseStream _stream;
        private readonly Table _table;
        // Fixed-size row storage; created lazily in InitializeFiles().
        private DataFile _dataFile;
        // Blob storage for NText/Image values; created lazily in InitializeFiles().
        private UnlimitedLengthDataFile _unlimitedLengthDataFile;

        internal TableRows(FileDatabaseStream stream, Table table)
        {
            _stream = stream;
            _table = table;
        }

        /// <summary>
        /// Returns the decoded row at the given zero-based index.
        /// Internally rows are numbered from 1, hence the "+ 1".
        /// </summary>
        public object[] this[int index]
        {
            get
            {
                // NOTE(review): negative indices are not rejected here - confirm callers never pass them.
                if (index >= Count)
                    throw new IndexOutOfRangeException();
                else
                {
                    return Get(index + 1);
                }
            }
        }

        /// <summary>
        /// Number of row slots in the data file (data length divided by row size).
        /// </summary>
        public int Count
        {
            get
            {
                InitializeFiles();
                // NOTE(review): if the table has no data file, "_dataFile?" yields a null
                // Nullable<ulong> and the (int) cast will throw at runtime - confirm this
                // property is never read for such tables.
                return (int)(_dataFile?.RootPage.DataLength / Convert.ToUInt64(_table.MaxRowSize));
            }
        }

        public IEnumerator<object[]> GetEnumerator()
        {
            // Rows are 1-based internally; Get() may return null for free (deleted) slots.
            for (int i = 0; i < Count; i++)
            {
                yield return Get(i + 1);
            }
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        // Opens the data files on first use and positions them at the first row/block.
        private void InitializeFiles()
        {
            if (_dataFile == null)
            {
                if (_table.DataFilePage != 0)
                {
                    _dataFile = new DataFile(_stream, _table.DataFilePage, _table.MaxRowSize, _table.Fields.Count > 0 && _table.Fields[0].Type != FieldType.RowVersion && _table.RecordLock);
                    if (_dataFile.HasData())
                    {
                        _dataFile.GoToDataStartingPosition();
                        _dataFile.GoToRow(1);
                    }
                }
                if (_table.UnlimitedLengthDataFilePage != 0)
                {
                    _unlimitedLengthDataFile = new UnlimitedLengthDataFile(_stream, _table.UnlimitedLengthDataFilePage);
                    // NOTE(review): this calls GoToDataStartingPosition() on _dataFile, not on
                    // _unlimitedLengthDataFile - looks like a wrong receiver (and _dataFile may be
                    // null here if DataFilePage == 0). Confirm the intent before changing.
                    if (_unlimitedLengthDataFile.HasData())
                        _dataFile.GoToDataStartingPosition();
                }
            }
        }

        /// <summary>
        /// Reads and decodes the 1-based row; returns null when the slot is free.
        /// Field values appear in the order of _table.Fields.
        /// </summary>
        private object[] Get(int rowNumber)
        {
            InitializeFiles();
            _dataFile.GoToRow(rowNumber);
            var values = new object[_table.Fields.Count];
            var rawData = _dataFile.ReadRow();
            if (rawData == null)
                return null;
            else
            {
                // Walk the raw row buffer, slicing MaxSize bytes per field.
                var currentOffset = 0;
                for (int i = 0; i < _table.Fields.Count; i++)
                {
                    var field = _table.Fields[i];
                    var fieldData = rawData[currentOffset..(currentOffset + field.MaxSize)];
                    currentOffset += field.MaxSize;
                    values[i] = field.Type switch
                    {
                        FieldType.Binary => ReadBinary(fieldData, field.Nullable),
                        FieldType.Logical => ReadLogical(fieldData, field.Nullable),
                        FieldType.Numeric => ReadNumericValue(fieldData, field.Precision, field.Nullable),
                        FieldType.NChar => ReadNChar(fieldData, field.Nullable),
                        FieldType.NVarChar => ReadNVarChar(fieldData, field.Nullable),
                        FieldType.RowVersion => ReadRowVersion(fieldData, field.Nullable),
                        FieldType.NText => ReadNText(fieldData, field.Nullable),
                        FieldType.Image => ReadImage(fieldData, field.Nullable),
                        FieldType.DateTime => ReadDateTime(fieldData, field.Nullable),
                        _ => throw new Exception($"Reading value for a field with type {field.Type} is not implemented")
                    };
                }
                return values;
            }
        }

        // Returns the raw bytes of a fixed-length binary value, or null.
        private byte[] ReadBinary(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                return valueData;
            }
        }

        // Decodes a Logical value: any non-zero byte is true.
        private bool? ReadLogical(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                return valueData[0] != 0;
            }
        }

        // Decodes a packed-BCD Numeric value. The first (high) nibble is the sign
        // (0 = negative); the remaining nibbles are decimal digits. The last digit
        // produced is dropped before parsing (original author's note kept below).
        private decimal? ReadNumericValue(byte[] data, int precision, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                var doubleStr = new StringBuilder();
                var negative = ReadTetrad(valueData[0]) == 0;
                if (negative)
                    doubleStr.Append('-');
                doubleStr.Append(ReadTetrad(valueData[0], true));
                for (int i = 1; i < valueData.Length; i++)
                {
                    doubleStr.Append(ReadTetrad(valueData[i]));
                    doubleStr.Append(ReadTetrad(valueData[i], true));
                }
                // NOTE(review): the insert position counts from the end including the
                // trailing digit that is removed just below - verify placement for
                // boundary precisions.
                if (precision > 0)
                    doubleStr.Insert(doubleStr.Length - 1 - precision, '.');
                // remove last zero, I don't know what is it
                doubleStr.Remove(doubleStr.Length - 1, 1);
                return decimal.Parse(doubleStr.ToString(), new NumberFormatInfo() { NumberDecimalSeparator = "." });
            }
        }

        // NOTE(review): appears unused - no caller in this class; kept for now.
        private void AddNumberToNumeric(StringBuilder doubleStr, int value)
        {
            // don't add leading zeros
            if (value != 0)
                doubleStr.Append(value);
            else if (doubleStr.Length > 0)
                doubleStr.Append(value);
        }

        // Decodes a fixed-length UTF-16 string (no length prefix, full field width).
        private string ReadNChar(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                return Encoding.Unicode.GetString(valueData);
            }
        }

        // Decodes a variable-length UTF-16 string: 2-byte LE character count,
        // then the characters.
        private string ReadNVarChar(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                var length = BinaryPrimitives.ReadUInt16LittleEndian(valueData);
                if (length == 0)
                    return "";
                else
                    return Encoding.Unicode.GetString(valueData[2..(length * 2 + 2)]);
            }
        }

        // Decodes a 7-byte packed-BCD timestamp: yyyy MM dd HH mm ss, one digit
        // per nibble. An all-zero value maps to DateTime.MinValue.
        private DateTime? ReadDateTime(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                var year1 = ReadTetrad(valueData[0]);
                var year2 = ReadTetrad(valueData[0], true);
                var year3 = ReadTetrad(valueData[1]);
                var year4 = ReadTetrad(valueData[1], true);
                var year = int.Parse($"{year1}{year2}{year3}{year4}");
                var month1 = ReadTetrad(valueData[2]);
                var month2 = ReadTetrad(valueData[2], true);
                var month = int.Parse($"{month1}{month2}");
                var day1 = ReadTetrad(valueData[3]);
                var day2 = ReadTetrad(valueData[3], true);
                var day = int.Parse($"{day1}{day2}");
                var hour1 = ReadTetrad(valueData[4]);
                var hour2 = ReadTetrad(valueData[4], true);
                var hour = int.Parse($"{hour1}{hour2}");
                var minute1 = ReadTetrad(valueData[5]);
                var minute2 = ReadTetrad(valueData[5], true);
                var minute = int.Parse($"{minute1}{minute2}");
                var second1 = ReadTetrad(valueData[6]);
                var second2 = ReadTetrad(valueData[6], true);
                var second = int.Parse($"{second1}{second2}");
                if (year == 0 && month == 0 && day == 0 && hour == 0 && minute == 0 && second == 0)
                    return DateTime.MinValue;
                else
                    return new DateTime(year, month, day, hour, minute, second);
            }
        }

        // Formats a 16-byte row version as four dotted uint components.
        // NOTE(review): the slice boundaries look off - [..5]/[5..9]/[9..13]/[12..]
        // skip byte 4 and read byte 12 twice (ReadUInt32LittleEndian only consumes
        // the first 4 bytes of each slice). Expected [..4]/[4..8]/[8..12]/[12..]?
        // Confirm the on-disk layout before changing.
        private string ReadRowVersion(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            else
            {
                var valueData = GetValueData(data, nullable);
                var v1 = BinaryPrimitives.ReadUInt32LittleEndian(valueData[..5]);
                var v2 = BinaryPrimitives.ReadUInt32LittleEndian(valueData[5..9]);
                var v3 = BinaryPrimitives.ReadUInt32LittleEndian(valueData[9..13]);
                var v4 = BinaryPrimitives.ReadUInt32LittleEndian(valueData[12..]);
                return $"{v1}.{v2}.{v3}.{v4}";
            }
        }

        // Decodes an NText value: resolves the blob reference and decodes as UTF-16.
        private string ReadNText(byte[] data, bool nullable)
        {
            var valueData = ReadUnlimitedData(data, nullable);
            if (valueData is null)
                return null;
            else
                return Encoding.Unicode.GetString(valueData);
        }

        // Decodes an Image value: resolves the blob reference to its raw bytes.
        private byte[] ReadImage(byte[] data, bool nullable)
        {
            var valueData = ReadUnlimitedData(data, nullable);
            if (valueData is null)
                return null;
            else
                return valueData;
        }

        // Resolves an 8-byte blob reference (4-byte LE block number, 4-byte LE data
        // length) through the unlimited-length data file and trims the block chain
        // to the stated length.
        private byte[] ReadUnlimitedData(byte[] data, bool nullable)
        {
            if (!HasValue(data, nullable))
                return null;
            var valueData = GetValueData(data, nullable);
            var blockNumber = BinaryPrimitives.ReadUInt32LittleEndian(valueData);
            var dataLength = BinaryPrimitives.ReadUInt32LittleEndian(valueData[4..]);
            _unlimitedLengthDataFile.GoToBlock(blockNumber);
            var valueRawData = _unlimitedLengthDataFile.ReadBlockChain();
            return valueRawData[..(int)dataLength];
        }

        // For nullable fields the first byte is a presence flag; non-nullable
        // fields always have a value.
        private bool HasValue(byte[] data, bool nullable)
        {
            if (nullable)
                return (bool)ReadLogical(data, false);
            else
                return true;
        }

        // Strips the leading null-marker byte from nullable field data.
        private byte[] GetValueData(byte[] data, bool nullable)
        {
            if (nullable)
                return data[1..];
            else
                return data;
        }

        // Extracts one nibble from a byte: the high nibble by default, the low
        // nibble when second == true.
        private int ReadTetrad(byte b, bool second = false)
        {
            if (second)
                return b & 0b_0000_1111;
            else
                return b >> 4;
        }
    }
}