diff --git a/.gitignore b/.gitignore
index c73419b..0923db1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+tests/upscale/2d_flow_homo_samples
# Merge
*.py.orig
diff --git a/.run/pytest in stochastic.run.xml b/.run/pytest in stochastic.run.xml
index f59f413..c8f6e8b 100644
--- a/.run/pytest in stochastic.run.xml
+++ b/.run/pytest in stochastic.run.xml
@@ -3,9 +3,9 @@
-
+
-
+
diff --git a/.run/pytest test_bspline_approx.run.xml b/.run/pytest test_bspline_approx.run.xml
index 9b36e7e..ddca26d 100644
--- a/.run/pytest test_bspline_approx.run.xml
+++ b/.run/pytest test_bspline_approx.run.xml
@@ -3,9 +3,9 @@
-
+
-
+
diff --git a/.run/pytest in gmsh.run.xml b/.run/test in gmsh.run.xml
similarity index 88%
rename from .run/pytest in gmsh.run.xml
rename to .run/test in gmsh.run.xml
index 6224869..2b14895 100644
--- a/.run/pytest in gmsh.run.xml
+++ b/.run/test in gmsh.run.xml
@@ -3,9 +3,9 @@
-
+
-
+
diff --git a/.run/test_brep_writer.run.xml b/.run/test_brep_writer.run.xml
index 54781e0..5edea9a 100644
--- a/.run/test_brep_writer.run.xml
+++ b/.run/test_brep_writer.run.xml
@@ -3,9 +3,9 @@
-
+
-
+
diff --git a/.run/tox all.run.xml b/.run/tox all.run.xml
index 96eca94..6c3449b 100644
--- a/.run/tox all.run.xml
+++ b/.run/tox all.run.xml
@@ -3,7 +3,8 @@
-
+
+
diff --git a/.run/tox in gmsh.run.xml b/.run/tox in gmsh.run.xml
index ead118d..9918d78 100644
--- a/.run/tox in gmsh.run.xml
+++ b/.run/tox in gmsh.run.xml
@@ -1,9 +1,11 @@
+
-
+
+
diff --git a/.run/tox in stochastic.run.xml b/.run/tox in stochastic.run.xml
index 43cfba8..d235810 100644
--- a/.run/tox in stochastic.run.xml
+++ b/.run/tox in stochastic.run.xml
@@ -3,7 +3,8 @@
-
+
+
diff --git a/requirements.txt b/requirements.txt
index 52590b4..60f9c3e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,7 @@
pytest
-numpy
-scipy
-bih
+
+# some test plottings that are not part of auto test suite
+matplotlib
+joblib
plotly
+pyvista # fem plot
diff --git a/setup.py b/setup.py
index 99da665..bc571a7 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,8 @@
# eg: 'keyword1', 'keyword2', 'keyword3',
],
- packages=['bgem', 'bgem.polygons', 'bgem.bspline', 'bgem.gmsh', 'bgem.external', 'bgem.geometry', 'bgem.stochastic'], #setuptools.find_packages(where='src'),
+ packages=['bgem', 'bgem.polygons', 'bgem.bspline', 'bgem.gmsh', 'bgem.external', 'bgem.geometry', 'bgem.stochastic',
+ 'bgem.core', 'bgem.upscale'], #setuptools.find_packages(where='src'),
package_dir={'': 'src'},
#py_modules=[os.path.splitext(os.path.basename(path))[0] for path in glob.glob('src/*.py')],
# package_data={
@@ -56,7 +57,7 @@
include_package_data=True,
zip_safe=False,
#install_requires=['numpy', 'scipy', 'bih', 'gmsh-sdk<=4.5.1'],
- install_requires=['numpy>=1.13.4', 'pandas', 'scipy', 'bih', 'gmsh>=4.10.4'],
+ install_requires=['numpy>=1.13.4', 'pandas', 'scipy', 'gmsh>=4.10.4', 'pyyaml-include', 'bih', 'nptyping', 'pyamg', 'attrs'],
# incompatible changes in SDK in release 4.6.0 to be changed in the new release of bgem
python_requires='>=3',
# extras_require={
diff --git a/src/bgem/core/__init__.py b/src/bgem/core/__init__.py
new file mode 100644
index 0000000..a6da130
--- /dev/null
+++ b/src/bgem/core/__init__.py
@@ -0,0 +1,7 @@
+from .memoize import EndorseCache, memoize, File
+from .common import substitute_placeholders, sample_from_population, workdir, array_attr
+from .config import dotdict, load_config, apply_variant, dump_config
+from .report import report
+from .flow_call import call_flow, FlowOutput
+
+year = 365.2425 * 24 * 60 * 60
diff --git a/src/bgem/core/common.py b/src/bgem/core/common.py
new file mode 100644
index 0000000..8abf65b
--- /dev/null
+++ b/src/bgem/core/common.py
@@ -0,0 +1,169 @@
+import os.path
+from typing import *
+import shutil
+import attrs
+import numpy.typing as npt
+
+from pathlib import Path
+import numpy as np
+import logging
+
+from .memoize import File
+
+class workdir:
+ """
+ Context manager for creation and usage of a workspace dir.
+
+ name: the workspace directory
+ inputs: list of files and directories to copy into the workspaceand
+ TODO: fine a sort of robust ad portable reference
+ clean: if true the workspace would be deleted at the end of the context manager.
+ TODO: clean_before / clean_after
+ TODO: File constructor taking current workdir environment, openning virtually copied files.
+ TODO: Workdir would not perform change of working dir, but provides system interface for: subprocess, file openning
+ only perform CD just before executing a subprocess also interacts with concept of an executable.
+ portable reference and with lazy evaluation. Optional true copy possible.
+ """
+ CopyArgs = Union[str, Tuple[str, str]]
+ def __init__(self, name:str="sandbox", inputs:List[CopyArgs] = None, clean=False):
+
+ if inputs is None:
+ inputs = []
+ self._inputs = inputs
+ self.work_dir = os.path.abspath(name)
+ Path(self.work_dir).mkdir(parents=True, exist_ok=True)
+ self._clean = clean
+ self._orig_dir = os.getcwd()
+
+ def copy(self, src, dest=None):
+ """
+ :param src: Realtive or absolute path.
+ :param dest: Relative path with respect to work dir.
+ Default is the same as the relative source path,
+ for abs path it is the just the last name in the path.
+ """
+ if isinstance(src, File):
+ src = src.path
+ if isinstance(dest, File):
+ dest = dest.path
+ #if dest == ".":
+ # if os.path.isabs(src):
+ # dest = os.path.basename(src)
+ # else:
+ # dest = src
+ if dest is None:
+ dest = ""
+ dest = os.path.join(self.work_dir, dest, os.path.basename(src))
+ dest_dir, _ = os.path.split(dest)
+ if not os.path.isdir(dest_dir):
+ #print(f"MAKE DIR: {dest_dir}")
+ Path(dest_dir).mkdir(parents=True, exist_ok=True)
+ abs_src = os.path.abspath(src)
+
+ # ensure that we always update the target
+ if os.path.isdir(dest):
+ shutil.rmtree(dest)
+ elif os.path.isfile(dest):
+ os.remove(dest)
+
+ # TODO: perform copy, link or redirectio to src during extraction of the File object from dictionary
+ # assumes custom tag for file, file_link, file_copy etc.
+ if os.path.isdir(src):
+ #print(f"COPY DIR: {abs_src} TO DESTINATION: {dest}")
+ shutil.copytree(abs_src, dest, dirs_exist_ok=True)
+ else:
+ try:
+ shutil.copy2(abs_src, dest)
+ except FileNotFoundError:
+ FileNotFoundError(f"COPY FILE: {abs_src} TO DESTINATION: {dest}")
+
+ def __enter__(self):
+ for item in self._inputs:
+ #print(f"treat workspace item: {item}")
+ if isinstance(item, Tuple):
+ self.copy(*item)
+ else:
+ self.copy(item)
+ os.chdir(self.work_dir)
+
+ return self.work_dir
+
+ def __exit__(self, type, value, traceback):
+ os.chdir(self._orig_dir)
+ if self._clean:
+ shutil.rmtree(self.work_dir)
+
+
+def substitute_placeholders(file_in: str, file_out: str, params: Dict[str, Any]):
+ """
+ In the template `file_in` substitute the placeholders in format ''
+ according to the dict `params`. Write the result to `file_out`.
+ TODO: set Files into params, in order to compute hash from them.
+ TODO: raise for missing value in dictionary
+ """
+ used_params = []
+ files = []
+ with open(file_in, 'r') as src:
+ text = src.read()
+ for name, value in params.items():
+ if isinstance(value, File):
+ files.append(value)
+ value = value.path
+ placeholder = '<%s>' % name
+ n_repl = text.count(placeholder)
+ if n_repl > 0:
+ used_params.append(name)
+ text = text.replace(placeholder, str(value))
+ with open(file_out, 'w') as dst:
+ dst.write(text)
+
+ return File(file_out, files), used_params
+
+
+# Directory for all flow123d main input templates.
+# These are considered part of the software.
+
+# TODO: running with stdout/ stderr capture, test for errors, log but only pass to the main in the case of
+# true error
+
+def array_attr(shape, dtype=np.double, default=[]):
+ """
+ Numpy array attribut definition for the attrs library.
+ - shape and dtype specification and auto conversion
+ - defalut empty array
+ """
+ # Unfortunately broadcast_to does not support -1 in the target_shape
+ # assume shape in form (-1, ...).
+ assert shape[0] == -1
+
+ def converter(x):
+ rev_shape = reversed( (len(x), *shape[1:]) )
+ return np.broadcast_to(np.array(x).T, rev_shape).T
+
+ return attrs.field(
+ type=npt.NDArray[dtype],
+ converter=converter,
+ default=default)
+
+
+def sample_from_population(n_samples:int, frequency:Union[np.array, int]):
+ if type(frequency) is int:
+ frequency = np.full(len(frequency), 1, dtype=int)
+ else:
+ frequency = np.array(frequency, dtype=int)
+
+ cumul_freq = np.cumsum(frequency)
+ total_samples = np.sum(frequency)
+ samples = np.random.randint(0, total_samples, size=n_samples + 1)
+ samples[-1] = total_samples # stopper
+ sample_seq = np.sort(samples)
+ # put samples into bins given by cumul_freq
+ bin_samples = np.empty_like(samples)
+ i_sample = 0
+ for ifreq, c_freq in enumerate(cumul_freq):
+
+ while sample_seq[i_sample] < c_freq:
+ bin_samples[i_sample] = ifreq
+ i_sample += 1
+
+ return bin_samples[:-1]
diff --git a/src/bgem/core/config.py b/src/bgem/core/config.py
new file mode 100644
index 0000000..5ca8e86
--- /dev/null
+++ b/src/bgem/core/config.py
@@ -0,0 +1,291 @@
+from dataclasses import dataclass
+from typing import *
+from pathlib import Path
+import os
+import yaml
+import re
+from socket import gethostname
+from glob import iglob
+import fsspec
+from fsspec.implementations.local import LocalFileSystem
+import yaml_include
+
+class RelativeLocalFileSystem(LocalFileSystem):
+ """
+ Auxiliary filesystem to allow specify the include path
+ relative to the main config_file.
+ """
+ def __init__(self, main_dir, **kwargs):
+ super().__init__(**kwargs)
+ self.main_dir = Path(main_dir)
+
+
+ def open(self, path, mode='rb', **kwargs):
+ # Attempt to open the path directly
+ try:
+ return super().open(path, mode, **kwargs)
+ except FileNotFoundError:
+ # If direct opening fails, try with base_dir prepended
+ full_path = self.main_dir / path
+ return super().open(str(full_path), mode, **kwargs)
+
+
+
+class YamlLimitedSafeLoader(type):
+ """Meta YAML loader that skips the resolution of the specified YAML tags."""
+ def __new__(cls, name, bases, namespace, do_not_resolve: List[str]) -> Type[yaml.SafeLoader]:
+ do_not_resolve = set(do_not_resolve)
+ implicit_resolvers = {
+ key: [(tag, regex) for tag, regex in mappings if tag not in do_not_resolve]
+ for key, mappings in yaml.SafeLoader.yaml_implicit_resolvers.items()
+ }
+ return super().__new__(
+ cls,
+ name,
+ (yaml.SafeLoader, *bases),
+ {**namespace, "yaml_implicit_resolvers": implicit_resolvers},
+ )
+
+class YamlNoTimestampSafeLoader(
+ metaclass=YamlLimitedSafeLoader, do_not_resolve={"tag:yaml.org,2002:timestamp"}
+):
+ """A safe YAML loader that leaves timestamps as strings."""
+ pass
+
+class dotdict(dict):
+ """
+ dot.notation access to dictionary attributes
+ TODO: keep somehow reference to the original YAML in order to report better
+ KeyError origin.
+ """
+ __setattr__ = dict.__setitem__
+ __delattr__ = dict.__delitem__
+
+ def __getattr__(self, item):
+ try:
+ return self[item]
+ except KeyError:
+ return self.__getattribute__(item)
+
+ @classmethod
+ def create(cls, cfg : Any):
+ """
+ - recursively replace all dicts by the dotdict.
+ """
+ if isinstance(cfg, dict):
+ items = ( (k, cls.create(v)) for k,v in cfg.items())
+ return dotdict(items)
+ elif isinstance(cfg, list):
+ return [cls.create(i) for i in cfg]
+ elif isinstance(cfg, tuple):
+ return tuple([cls.create(i) for i in cfg])
+ else:
+ return cfg
+
+ @staticmethod
+ def serialize(cfg):
+ if isinstance(cfg, (dict, dotdict)):
+ return { k:dotdict.serialize(v) for k,v in cfg.items()}
+ elif isinstance(cfg, list):
+ return [dotdict.serialize(i) for i in cfg]
+ elif isinstance(cfg, tuple):
+ return tuple([dotdict.serialize(i) for i in cfg])
+ else:
+ return cfg
+
+Key = Union[str, int]
+Addr = Tuple[Key]
+VariantPatch = Dict[str, dotdict]
+
+@dataclass
+class AddrIter:
+ path: Addr
+ # full address path
+ i: int = 0
+ # actual level of the path; initial -1 is before first call to `idx` or `key`.
+
+ def is_leaf(self):
+ return self.i == len(self.path)
+
+ def idx(self):
+ try:
+ return int(self.path[self.i]), AddrIter(self.path, self.i + 1)
+ except ValueError:
+ raise IndexError(f"Variant substitution: IndexError at address: '{self.address()}'.")
+
+ def key(self):
+ key = self.path[self.i]
+ if len(key) > 0 and not key[0].isdigit():
+ return key, AddrIter(self.path, self.i + 1)
+ else:
+ raise KeyError(f"Variant substitution: KeyError at address: '{self.address()}'.")
+
+ def address(self):
+ sub_path = self.path[:self.i + 1]
+ return '/'.join([str(v) for v in sub_path])
+
+
+def _item_update(key:Key, val:dotdict, sub_path:Key, sub:dotdict):
+ sub_key, path = sub_path
+ if key == sub_key:
+ if path.empty():
+ # Recursion termination
+ return sub
+ else:
+ return deep_update(val, path, sub)
+ else:
+ return val
+
+def deep_update(cfg: dotdict, iter:AddrIter, substitute:dotdict):
+ if iter.is_leaf():
+ return substitute
+
+ if isinstance(cfg, list):
+ key, sub_path = iter.idx()
+ new_cfg = list(cfg)
+ elif isinstance(cfg, (dict, dotdict)):
+ key, sub_path = iter.key()
+ new_cfg = dotdict(cfg)
+ else:
+ raise TypeError(f"Variant substitution: Unknown type {type(cfg)}")
+ new_cfg[key] = deep_update(cfg[key], sub_path, substitute)
+ return new_cfg
+
+
+
+def apply_variant(cfg:dotdict, variant:VariantPatch) -> dotdict:
+ """
+ In the `variant` dict the keys are interpreted as the address
+ in the YAML file. The address is a list of strings and ints separated by '/'
+ and representing an item of the YAML file.
+ For every `(address, value)` item of the `variant` dict the referenced item
+ in `cfg` is replaced by `value`.
+
+ Implemented by recursion with copy of changed collections.
+ May be slow for too many variant items and substitution of the large collection.
+ :param cfg:
+ :param variant: dictionary path -> dotdict
+ :return:
+ """
+ new_cfg = cfg
+ for path_str, val in variant.items():
+ path = tuple(path_str.split('/'))
+ assert path
+ new_cfg = deep_update(new_cfg, AddrIter(path), val)
+ return new_cfg
+
+# Purpose of following was adding included files into config so that we can move it to the worksapce
+# Could be simplified as yaml_include provides a cusom_loader callback now from ver 2.0
+
+# class YamlInclude(yaml_include.Constructor):
+# def __init__(self, *args, **kwargs):
+# super().__init__(*args, **kwargs)
+# self.included_files = []
+#
+# def load(
+# self,
+# loader_type,
+# datepathname: str,
+# recursive: bool = False,
+# ): # pylint:disable=too-many-arguments
+# if not encoding:
+# encoding = self._encoding or self.DEFAULT_ENCODING
+# if self._base_dir:
+# pathname = os.path.join(self._base_dir, pathname)
+# reader_clz = None
+# if reader:
+# reader_clz = get_reader_class_by_name(reader)
+# if re.match(WILDCARDS_PATTERN, pathname):
+# result = []
+# iterable = iglob(pathname, recursive=recursive)
+# for path in filter(os.path.isfile, iterable):
+# self.included_files.append(path)
+# if reader_clz:
+# result.append(reader_clz(path, encoding=encoding, loader_class=type(loader))())
+# else:
+# result.append(self._read_file(path, loader, encoding))
+# return result
+# self.included_files.append(pathname)
+# if reader_clz:
+# return reader_clz(pathname, encoding=encoding, loader_class=type(loader))()
+# return self._read_file(pathname, loader, encoding)
+
+def resolve_machine_configuration(cfg:dotdict, hostname) -> dotdict:
+ # resolve machine configuration
+ if 'machine_config' not in cfg:
+ return cfg
+ if hostname is None:
+ hostname = gethostname()
+ machine_cfg = cfg.machine_config.get(hostname, None)
+ if machine_cfg is None:
+ machine_cfg = cfg.machine_config.get('__default__', None)
+ if machine_cfg is None:
+ raise KeyError(f"Missing hostname: {hostname} in 'cfg.machine_config'.")
+ cfg.machine_config = machine_cfg
+ return cfg
+
+def load_config(path, collect_files=False, hostname=None):
+ """
+ Load configuration from given file replace, dictionaries by dotdict
+ uses pyyaml-tags namely for:
+ include tag:
+ geometry: <% include(path="config_geometry.yaml")>
+ """
+ path = Path(path)
+ included_files = []
+ def store_includes(inc_path, file, loader):
+ inc_path = Path(inc_path)
+ if not inc_path.is_absolute():
+ inc_path = path.parent / inc_path
+ included_files.append(inc_path.resolve())
+ return yaml.load(file, loader)
+ fs_hook = RelativeLocalFileSystem(main_dir=path.parent)
+ yaml.add_constructor("!include", yaml_include.Constructor(fs=fs_hook, custom_loader=store_includes), YamlNoTimestampSafeLoader)
+ #instance = YamlInclude.add_to_loader_class(loader_class=YamlNoTimestampSafeLoader, base_dir=os.path.dirname(path))
+ cfg_dir = os.path.dirname(path)
+ with open(path) as f:
+ cfg = yaml.load(f, Loader=YamlNoTimestampSafeLoader)
+ cfg['_config_root_dir'] = os.path.abspath(cfg_dir)
+ dd = dotdict.create(cfg)
+ dd = resolve_machine_configuration(dd, hostname)
+ if collect_files:
+ referenced = included_files
+ referenced.append(path)
+ other_files = collect_referenced_files(dd, ['.', cfg_dir])
+ referenced.extend([Path(p) for p in other_files])
+ dd['_file_refs'] = referenced
+ return dd
+
+def dump_config(config):
+ with open("__config_resolved.yaml", "w") as f:
+ yaml.dump(config, f)
+
+def path_search(filename, path):
+ if not isinstance(filename, str):
+ return []
+ # Abs paths intentionally not included
+ # if os.path.isabs(filename):
+ # if os.path.isabs(filename) and os.path.isfile(filename):
+ # return [os.path.abspath(filename)]
+ # else:
+ # return []
+ for dir in path:
+ full_name = os.path.join(dir, filename)
+ if os.path.isfile(full_name):
+ return [os.path.abspath(full_name)]
+ return []
+
+FilePath = NewType('FilePath', str)
+def collect_referenced_files(cfg:dotdict, search_path:List[str]) -> List[FilePath]:
+ if isinstance(cfg, (dict, dotdict)):
+ referenced = [collect_referenced_files(v, search_path) for v in cfg.values()]
+ elif isinstance(cfg, (list, tuple)):
+ referenced = [collect_referenced_files(v, search_path) for v in cfg]
+ else:
+ return path_search(cfg, search_path)
+ # flatten
+ return [i for l in referenced for i in l]
+
+
+
+
diff --git a/src/bgem/core/flow_call.py b/src/bgem/core/flow_call.py
new file mode 100644
index 0000000..af706f4
--- /dev/null
+++ b/src/bgem/core/flow_call.py
@@ -0,0 +1,144 @@
+from typing import *
+import logging
+import os
+import attrs
+from . import dotdict, memoize, File, report, substitute_placeholders, workdir
+import subprocess
+from pathlib import Path
+import yaml
+
+def search_file(basename, extensions):
+ """
+ Return first found file or None.
+ """
+ if type(extensions) is str:
+ extensions = (extensions,)
+ for ext in extensions:
+ if os.path.isfile(basename + ext):
+ return File(basename + ext)
+ return None
+
+class EquationOutput:
+ def __init__(self, eq_name, balance_name):
+ self.eq_name: str = eq_name
+ self.spatial_file: File = search_file(eq_name+"_fields", (".msh", ".pvd"))
+ self.balance_file: File = search_file(balance_name+"_balance", ".txt"),
+ self.observe_file: File = search_file(eq_name+"_observe", ".yaml")
+
+ def _load_yaml_output(self, file, basename):
+ if file is None:
+ raise FileNotFoundError(f"Not found Flow123d output file: {self.eq_name}_{basename}.yaml.")
+ with open(file.path, "r") as f:
+ loaded_yaml = yaml.load(f, yaml.CSafeLoader)
+ return dotdict.create(loaded_yaml)
+
+ def observe_dict(self):
+ return self._load_yaml_output(self.observe_file, 'observe')
+
+ def balance_dict(self):
+ return self._load_yaml_output(self.balance_file, 'balance')
+
+ def balance_df(self):
+ """
+ create a dataframe for the Balance file
+ rows for times, columns are tuple (region, value),
+ values =[ flux, flux_in, flux_out, mass, source, source_in, source_out, flux_increment, source_increment, flux_cumulative, source_cumulative, error ]
+ :return:
+ TODO: ...
+ """
+ dict = self.balance_dict()
+ pass
+
+
+
+class FlowOutput:
+
+ def __init__(self, process: subprocess.CompletedProcess, stdout: File, stderr: File, output_dir="output"):
+ self.process = process
+ self.stdout = stdout
+ self.stderr = stderr
+ with workdir(output_dir):
+ self.log = File("flow123.0.log")
+ # TODO: flow ver 4.0 unify output file names
+ self.hydro = EquationOutput("flow", "water")
+ self.solute = EquationOutput("solute", "mass")
+ self.mechanic = EquationOutput("mechanics", "mechanics")
+
+ @property
+ def success(self):
+ return self.process.returncode == 0
+
+ def check_conv_reasons(self):
+ """
+ Check correct convergence of the solver.
+ Reports the divergence reason and returns false in case of divergence.
+ """
+ with open(self.log.path, "r") as f:
+ for line in f:
+ tokens = line.split(" ")
+ try:
+ i = tokens.index('convergence')
+ if tokens[i + 1] == 'reason':
+ value = tokens[i + 2].rstrip(",")
+ conv_reason = int(value)
+ if conv_reason < 0:
+ print("Failed to converge: ", conv_reason)
+ return False
+ except ValueError:
+ continue
+ return True
+
+#@memoize
+def _prepare_inputs(file_in, params):
+ in_dir, template = os.path.split(file_in)
+ root = template.removesuffix(".yaml").removesuffix("_tmpl")
+ template_path = Path(file_in).rename(Path(in_dir) / (root + "_tmpl.yaml"))
+ #suffix = "_tmpl.yaml"
+ #assert template[-len(suffix):] == suffix
+ #filebase = template[:-len(suffix)]
+ main_input = Path(in_dir) / (root + ".yaml")
+ main_input, used_params = substitute_placeholders(str(template_path), str(main_input), params)
+ return main_input
+
+#@memoize
+def _flow_subprocess(arguments, main_input):
+ filebase, ext = os.path.splitext(os.path.basename(main_input.path))
+ arguments.append(main_input.path)
+ logging.info("Running Flow123d: " + " ".join(arguments))
+
+ stdout_path = filebase + "_stdout"
+ stderr_path = filebase + "_stderr"
+ with open(stdout_path, "w") as stdout:
+ with open(stderr_path, "w") as stderr:
+ print("Call: ", ' '.join(arguments))
+ completed = subprocess.run(arguments, stdout=stdout, stderr=stderr)
+ return File(stdout_path), File(stderr_path), completed
+
+#@report
+#@memoize
+def call_flow(cfg:'dotdict', file_in:File, params: Dict[str,str]) -> FlowOutput:
+ """
+ Run Flow123d in actual work dir with main input given be given template and dictionary of parameters.
+
+ 1. prepare the main input file from filebase_in + "_tmpl.yamlL"
+ 2. run Flow123d
+
+ TODO: pass only flow configuration
+ """
+ main_input = _prepare_inputs(file_in, params)
+ stdout, stderr, completed = _flow_subprocess(cfg.flow_executable.copy(), main_input)
+ logging.info(f"Exit status: {completed.returncode}")
+ if completed.returncode != 0:
+ with open(stderr.path, "r") as stderr:
+ print(stderr.read())
+ raise Exception("Flow123d ended with error")
+
+ fo = FlowOutput(completed, stdout.path, stderr.path)
+ conv_check = fo.check_conv_reasons()
+ logging.info(f"converged: {conv_check}")
+ return fo
+
+# TODO:
+# - call_flow variant with creating dir, copy,
+
+
diff --git a/src/bgem/core/memoize.py b/src/bgem/core/memoize.py
new file mode 100644
index 0000000..342adb2
--- /dev/null
+++ b/src/bgem/core/memoize.py
@@ -0,0 +1,191 @@
+import logging
+from typing import *
+#import redis_cache
+import hashlib
+from functools import wraps
+import time
+import os
+
+
+"""
+TODO: modify redis_simple_cache or our memoize decorator to hash also function code
+ see https://stackoverflow.com/questions/18134087/how-do-i-check-if-a-python-function-changed-in-live-code
+ that one should aslo hash called function .. the whole tree
+ more over we should also hash over serialization of classes
+"""
+
+class EndorseCache:
+ pass
+ # __instance__ = None
+ # @staticmethod
+ # def instance(*args, **kwargs):
+ # if EndorseCache.__instance__ is None:
+ # EndorseCache.__instance__ = EndorseCache(*args, **kwargs)
+ # return EndorseCache.__instance__
+ #
+ # def __init__(self, host="localhost", port=6379):
+ # # TODO: possibly start redis server
+ # self.cache = redis_cache.SimpleCache(10000, hashkeys=True, host=host, port=port)
+ #
+ #
+ # def expire_all(self):
+ # self.cache.expire_all_in_set()
+
+# Workaround missing module in the function call key
+# def memoize():
+# endorse_cache = EndorseCache.__instance__
+# def decorator(fn):
+# # redis-simple-cache does not include the function module into the key
+# # we poss in a functions with additional parameter
+# def key_fn(fn_id , *args, **kwargs):
+# return fn(*args, **kwargs)
+#
+# modif_fn = redis_cache.cache_it(limit=10000, expire=redis_cache.DEFAULT_EXPIRY, cache=endorse_cache.cache)(key_fn)
+#
+# @wraps(fn)
+# def wrapper(*args, **kwargs):
+# return modif_fn((fn.__name__, fn.__module__), *args, **kwargs)
+# return wrapper
+# return decorator
+
+def memoize(fn):
+ endorse_cache = EndorseCache.instance()
+ redis_cache_deco = redis_cache.cache_it(limit=10000, expire=redis_cache.DEFAULT_EXPIRY, cache=endorse_cache.cache)
+ return redis_cache_deco(fn)
+
+
+
+
+class File:
+ """
+ An object that should represent a file as a computation result.
+ Contains the path and the file content hash.
+ The system should also prevent modification of the files that are already created.
+ To this end one has to use File.open instead of the standard open().
+ Current usage:
+
+ with File.open(path, "w") as f:
+ f.write...
+
+ return File.from_handle(f) # check that handel was opened by File.open and is closed, performs hash.
+
+ Ideally, the File class could operate as the file handle and context manager.
+ However that means calling system open() and then modify its __exit__ method.
+ However I was unable to do that. Seems like __exit__ is changed, but changed to the original one smowere latter as
+ it is not called. Other possibility is to wrap standard file handle and use it like:
+
+ @joblib.task
+ def make_file(file_path, content):`
+ with File.open(file_path, mode="w") as f: # calls self.handle = open(file_path, mode)
+ f.handle.write(content)
+ # called File.__exit__ which calls close(self.handle) and performs hashing.
+ return f
+
+ TODO: there is an (unsuccessful) effort to provide special handle for writting.
+ TODO: Override deserialization in order to check that the file is unchanged.
+ Seems that caching just returns the object without actuall checking.
+ """
+
+ # @classmethod
+ # def handle(cls, fhandle):
+ # return File(fhandle.name)
+
+ # @classmethod
+ # def output(cls, path):
+ # """
+ # Create File instance intended for write.
+ # The hash is computed after call close of the of open() handle.
+ # Path is checked to not exists yet.
+ # """
+ # return cls(path, postponed=True)
+ _hash_fn = hashlib.md5
+ def __init__(self, path: str, files:List['File'] = None): # , hash:Union[bytes, str]=None) #, postponed=False):
+ """
+ For file 'path' create object containing both path and content hash.
+ Optionaly the files referenced by the file 'path' could be passed by `files` argument
+ in order to include their hashes.
+ :param path: str
+ :param files: List of referenced files.
+ """
+ self.path = os.path.abspath(path)
+ if files is None:
+ files = []
+ self.referenced_files = files
+ self._set_hash()
+
+ def __getstate__(self):
+ return (self.path, self.referenced_files)
+
+ def __setstate__(self, args):
+ self.path, self.referenced_files = args
+ self._set_hash()
+
+ def _set_hash(self):
+ files = self.referenced_files
+ md5 = self.hash_for_file(self.path)
+ for f in files:
+ md5.update(repr(f).encode())
+ self.hash = md5.hexdigest()
+
+ @staticmethod
+ def open(path, mode="wt"):
+ """
+ Mode could only be 'wt' or 'wb', 'x' is added automaticaly.
+ """
+ exclusive_mode = {"w": "x", "wt": "xt", "wb": "xb"}[mode]
+ # if os.path.isfile(path):
+ # raise ""
+ fhandle = open(path, mode=exclusive_mode) # always open for exclusive write
+ return fhandle
+
+ @classmethod
+ def from_handle(cls, handle):
+ assert handle.closed
+ assert handle.mode.find("x") != -1
+ return cls(handle.name)
+
+ def __hash__(self):
+ if self.hash is None:
+ raise Exception("Missing hash of output file.")
+ return hash(self.path, self.hash)
+
+ def __str__(self):
+ return f"File('{self.path}', hash={self.hash})"
+
+
+ """
+ Could be used from Python 3.11
+ @staticmethod
+ def hash_for_file(path):
+ with open(path, "rb") as f:
+ return hashlib.file_digest(f, "md5")
+
+ md5 = hashlib.md5()
+ with open(path, 'rb') as f:
+ for chunk in iter(lambda: f.read(block_size), b''):
+ md5.update(chunk)
+ return md5.digest()
+ """
+
+ @staticmethod
+ def hash_for_file(path):
+ '''
+ Block size directly depends on the block size of your filesystem
+ to avoid performances issues
+ Here I have blocks of 4096 octets (Default NTFS)
+ '''
+ block_size = 256 * 128
+ md5 = File._hash_fn()
+ try:
+ with open(path, 'rb') as f:
+ for chunk in iter(lambda: f.read(block_size), b''):
+ md5.update(chunk)
+ except FileNotFoundError:
+ raise FileNotFoundError(f"Missing cached file: {path}")
+ return md5
+
+
+"""
+
+
+"""
diff --git a/src/bgem/core/report.py b/src/bgem/core/report.py
new file mode 100644
index 0000000..c768a86
--- /dev/null
+++ b/src/bgem/core/report.py
@@ -0,0 +1,20 @@
+from functools import wraps
+import logging
+import time
+
+__report_indent_level = 0
+
+def report(fn):
+ @wraps(fn)
+ def do_report(*args, **kwargs):
+ global __report_indent_level
+ __report_indent_level += 1
+ init_time = time.perf_counter()
+ result = fn(*args, **kwargs)
+ duration = time.perf_counter() - init_time
+ __report_indent_level -= 1
+ indent = (__report_indent_level * 2) * " "
+ logging.info(f"{indent}DONE {fn.__module__}.{fn.__name__} @ {duration}")
+ return result
+ return do_report
+
diff --git a/src/bgem/fn.py b/src/bgem/fn.py
new file mode 100644
index 0000000..67c7a32
--- /dev/null
+++ b/src/bgem/fn.py
@@ -0,0 +1,22 @@
+"""
+Various function programming tools.
+"""
+
+# Various functional imports
+from functools import cached_property
+
+def compose(*functions):
+ """
+ Return composition of functions:
+ compose(A,B,C)(any args) is equivalent to A(B(C(any args))
+
+ Useful for functional programming and dependency injection.
+ """
+ def composed_function(*args, **kwargs):
+ # Start by applying the rightmost function with all arguments
+ result = functions[-1](*args, **kwargs)
+ # Then apply the rest in reverse order, each to the result of the previous
+ for f in reversed(functions[:-1]):
+ result = f(result)
+ return result
+ return composed_function
\ No newline at end of file
diff --git a/src/bgem/gmsh/gmsh.py b/src/bgem/gmsh/gmsh.py
index 412891c..1e82c25 100644
--- a/src/bgem/gmsh/gmsh.py
+++ b/src/bgem/gmsh/gmsh.py
@@ -2,12 +2,13 @@
from typing import *
from collections import defaultdict
import enum
-import attr
+import attrs
import numpy as np
import gmsh
import re
import warnings
+from bgem import Transform
from bgem.gmsh import gmsh_exceptions
from bgem.gmsh import options as gmsh_options
from bgem.gmsh import gmsh_io
@@ -53,18 +54,35 @@
- seems that occ.copy() doesn't preserve boundaries, so boundary dim tags are copied twice
(It does exactly what it is asked for just copy the given shapes)
(Problem resolved by introduction of select_by_intersection)
+
+Proposed functional apporach:
+
+Rationale:
+- ObjectSet keeps set of dimtags, which may become invalid, ObjectSets could become invalid because of operation of larger ObjectSet that includes it.
+
+ Resoluion: We would rather refactor ObjectSets into handles to the particular geometry part referenced by the complete tree of operations leading to it.
+ The whole geometry would be first constructed in terms of a DAG of operations and then applied to GMSH just before meshing.
+ The application would treat each operation as global, properly mapping dimtags to the new ons and mange properties associated with the
+ dimtags.
+- Getting boundary is not associative, bool opertations provides mapping only to dimtags explicitely provided as arguments.
+ Probably no way to map boundaries. Need to treat boundary operations in lazy way apply them more times using orig
+
+
"""
-@attr.s(auto_attribs=True)
+@attrs.define(auto_attribs=True, frozen=False)
class Region:
dim: Optional[int]
id: int
name: str
_boundary_region: 'Region' = None
+ _gmsh_id: int = None
+ # Physical domain ID in GMSH mesh
_max_reg_id = 99999
+
@classmethod
def get_region_id(cls):
cls._max_reg_id += 1
@@ -123,6 +141,21 @@ class MeshFormat(enum.IntEnum):
+class Mesh:
+ """
+ Interface to the GMSH mesh object with access to suitable mesh processing for
+ Flow123d simulation.
+ - mesh healing (with allowed small corruption of internal interfaces)
+ - region/physical domain association based on element entities
+ - region/physical domain association assigned after meshing, elements need not to respect interfaces
+ # - reading and writing field data on the mesh
+
+ GMSH model use global variables and could not be simply
+ interfaced using a functional approach.
+ Thus we rather want to copy mesh data to our own structures.
+
+
+ """
class GeometryOCC:
@@ -435,6 +468,14 @@ def cylinder(self, r=1, axis=[0, 0, 1], center=[0, 0, 0]):
return self.object(3, cylinder_tag)
def disc_discrete(self, radius=1, center=[0, 0, 0], n_points=6, axis=[0, 0, 1]):
+ """
+ Create a regular polygon with n_points vertices.
+ :param radius: Radius of
+ :param center:
+ :param n_points:
+ :param axis:
+ :return:
+ """
points = []
v = [1, 0, 0] # take a random vector
# test if v and axis are coplanar
@@ -520,8 +561,7 @@ def make_fractures(self, fractures, base_shape: 'ObjectSet'):
for i, fr in enumerate(fractures):
shape = base_shape.copy()
print("fr: ", i, "tag: ", shape.dim_tags)
- shape = shape.scale([fr.rx, fr.ry, 1]) \
- .rotate(axis=fr.rotation_axis, angle=fr.rotation_angle) \
+ shape = shape.transfrom(fr.transform_mat) \
.translate(fr.center) \
.set_region(fr.region)
@@ -855,6 +895,9 @@ def translate(self, vector):
self.factory._need_synchronize = True
return self
+ def transform(self, transform:Transform):
+ self.factory.model.affineTransform(self.dim_tags, transform.full_affine_matrix)
+
def rotate(self, axis, angle, center=[0, 0, 0]):
self.factory.model.rotate(self.dim_tags, *center, *axis, angle)
self.factory._need_synchronize = True
diff --git a/src/bgem/gmsh/heal_mesh.py b/src/bgem/gmsh/heal_mesh.py
index 20134b7..cf28dc4 100644
--- a/src/bgem/gmsh/heal_mesh.py
+++ b/src/bgem/gmsh/heal_mesh.py
@@ -1,8 +1,9 @@
+import logging
import os
import collections
import numpy as np
from typing import Tuple
-import attr
+import attrs
class ShapeBase:
@@ -101,7 +102,7 @@ def gmsh_gamma(self):
# circum radius
a,b,c,A,B,C = self.edge_lens
x_area = (a*A + b*B + c*C)*(a*A + b*B - c*C)*(a*A - b*B + c*C)*(-a*A + b*B + c*C)
- assert x_area > 0, x_area
+ # assert x_area > 0, x_area
R = np.sqrt(x_area) / 24 / max(1e-300, V)
self._gamma = 3 * r/max(1e-300, R)
return self._gamma
@@ -200,7 +201,7 @@ def edge_lens(self):
-@attr.s(auto_attribs=True)
+@attrs.define(auto_attribs=True)
class Element:
eid: int
type: int
@@ -503,7 +504,6 @@ def stats_to_yaml(self, filename, el_tol=0.01):
def heal_mesh(self, gamma_tol=0.02, fraction_of_new_els=2):
-
self.gamma_tol = gamma_tol
orig_n_el = self.max_ele_id
el_to_check = collections.deque(self.mesh.elements.keys())
diff --git a/src/bgem/stochastic/__init__.py b/src/bgem/stochastic/__init__.py
index e69de29..b55d59e 100644
--- a/src/bgem/stochastic/__init__.py
+++ b/src/bgem/stochastic/__init__.py
@@ -0,0 +1,4 @@
+from .dfn import Population, PowerLawSize, UniformBoxPosition, VonMisesOrientation, FisherOrientation
+from .fr_set import Fracture, FractureSet, EllipseShape, RectangleShape, PolygonShape
+from .fr_mesh import geometry_gmsh, geometry_brep_writer
+ # SquareShape, LineShape, DiscShape, ConvexPolygon)
\ No newline at end of file
diff --git a/src/bgem/stochastic/dfn.py b/src/bgem/stochastic/dfn.py
index 3180e50..a4652c0 100644
--- a/src/bgem/stochastic/dfn.py
+++ b/src/bgem/stochastic/dfn.py
@@ -1,9 +1,997 @@
"""
-Module for:
-1. creation of the BREP model from the list of fractures (deal with intersections)
-2. fracture network manipulations and simplifications to allow good meshing
+Module for statistical description of the fracture networks.
+It provides appropriate statistical models as well as practical sampling methods.
-.. TO Be Done
+TODO:
+- move pos_distr into Population configuration as well
+- shape modification as separate fn,, or part of other population reconfiguration functions (common range)
+- some concept of correlation within and between families:
+ - define generation of next fracture conditioned by the previously generated set
+ define conditional probability as a |weighted) superposition of conditional probablilities from individual generated fractures.
+ - considering a multiscale nature of the fractures the largest fractures can not be sampled since we have just single
+ sample, for smaller scales we can consider observations at distinct locations independent, but still affected by
+ "semi deterministic" large scale. Generating from large deterministic structures down to the smaller scales using
+ conditional probablilities. Caould possibly bridge gap between deterministic and completerly random.
+ view generation as a process with next fracture depends on previously generated set
+ - Consider much simpler 2D cases for developing a suitable model allowint transition from deterministic large scale features
+ to fully stochastic small scale.
+ A. Random placemnt of discs (or circles) with power law size distribution and nonstationary density.
+ B. Same but with spatial correlation of positions.
+ C. 2D lines.
+ The 2d models are good for development even if has low applicability (?? other random processes
"""
+from typing import *
+from pathlib import Path
+import numpy as np
+#import attr
+import attrs
+import math
+import json
+import yaml
+from bgem.stochastic import fr_set
+
+"""
+Auxiliary normal manipulation functions.
+TODO: tr to remove, incorporate into FractureSet if needed.
+"""
+
+
+@attrs.define
+class VonMisesOrientation:
+ """
+ Distribution for random orientation in 2d.
+ X = east, Y = north
+ """
+
+ trend: float = 0
+ # azimuth (0, 360) of the fractures normal
+ concentration: float = 0
+ # concentration parameter, 0 = uniformely dispersed, 1 = exect orientation
+
+ def sample_axis_angle(self, size=1):
+ """
+ Sample fracture orientation angles.
+ :param size: Number of samples
+ :return: shape (n, 4), every row: unit axis vector and angle
+ """
+ axis_angle = np.tile(np.array([0, 0, 1, 0], dtype=float), size).reshape((size, 4))
+ axis_angle[:, 3] = self.sample_angle(size)
+ return axis_angle
+
+
+ def sample_angle(self, size=1):
+ trend = np.radians(self.trend)
+ if self.concentration > np.log(np.finfo(float).max):
+ return trend + np.zeros(size)
+ else:
+ if self.concentration == 0:
+ return np.random.uniform(size=size) * 2 * np.pi
+ else:
+ return np.random.vonmises(mu=trend, kappa=self.concentration, size=size)
+
+ def sample_normal(self, size=1):
+ """
+ Draw samples for the fracture normals.
+ :param size: number of samples
+ :return: array (n, 3)
+ """
+ angle = self.sample_angle(size)
+ return np.stack([np.cos(angle), np.sin(angle), np.zeros_like(angle)], axis=1)
+
+
+@attrs.define
+class FisherOrientation:
+ """
+ Distribution for random orientation in 3d.
+
+ Coordinate system: X - east, Y - north, Z - up
+
+ strike, dip - used for the orientation of the planar geological features
+ trend, plunge - used for the orientation of the line geological features
+
+ As the distribution is considerd as distribution of the fracture normal vectors we use
+ trend, plunge as the primal parameters.
+ """
+
+ trend: float
+ # mean fracture normal (pointing down = negative Z)
+ # azimuth (0, 360) of the normal's projection to the horizontal plane
+ # related term is the strike = trend - 90; that is azimuth of the strike line
+ # - the intersection of the fracture with the horizontal plane
+ plunge: float
+ # mean fracture normal (pointing down = = negative Z)
+ # angle (0, 90) between the normal and the horizontal plane
+ # related term is the dip = 90 - plunge; that is the angle between the fracture and the horizontal plane
+ #
+ # strike and dip can by understood as the first two Eulerian angles.
+ concentration: float
+ # the concentration parameter; 0 = uniform dispersion, infty - no dispersion
+
+ @staticmethod
+ def strike_dip(strike, dip, concentration):
+ """
+ Initialize from (strike, dip, concentration)
+ """
+ return FisherOrientation(strike + 90, 90 - dip, concentration)
+
+ def _sample_standard_fisher(self, n) -> np.array:
+ """
+ Normal vector of random fractures with mean direction (0,0,1).
+ :param n:
+ :return: array of normals (n, 3)
+ """
+ if self.concentration > np.log(np.finfo(float).max):
+ normals = np.zeros((n, 3))
+ normals[:, 2] = 1.0
+ else:
+ unif = np.random.uniform(size=n)
+ psi = 2 * np.pi * np.random.uniform(size=n)
+ cos_psi = np.cos(psi)
+ sin_psi = np.sin(psi)
+ if self.concentration == 0:
+ cos_theta = 1 - 2 * unif
+ else:
+ exp_k = np.exp(self.concentration)
+ exp_ik = 1 / exp_k
+ cos_theta = np.log(exp_k - unif * (exp_k - exp_ik)) / self.concentration
+ sin_theta = np.sqrt(1 - cos_theta ** 2)
+ # theta = 0 for the up direction, theta = pi for the down direction
+ normals = np.stack((sin_psi * sin_theta, cos_psi * sin_theta, cos_theta), axis=1)
+ return normals
+
+ def sample_normal(self, size=1):
+ """
+ Draw samples for the fracture normals.
+ :param size: number of samples
+ :return: array (n, 3)
+ """
+ raw_normals = self._sample_standard_fisher(size)
+ mean_norm = self._mean_normal()
+ axis_angle = fr_set.normals_to_axis_angles(mean_norm[None, :])
+ return fr_set.rotate(raw_normals, axis_angle=axis_angle[0])
+
+
+ #def sample_axis_angle(self, size=1):
+ # """
+ # Sample fracture orientation angles.
+ # :param size: Number of samples
+ # :return: shape (n, 4), every row: unit axis vector and angle
+ # """
+ # normals = self._sample_normal(size)
+ # return self.normal_to_axis_angle(normals[:])
+
+
+ def _mean_normal(self):
+ trend = np.radians(self.trend)
+ plunge = np.radians(self.plunge)
+ normal = np.array([np.sin(trend) * np.cos(plunge),
+ np.cos(trend) * np.cos(plunge),
+ -np.sin(plunge)])
+
+ # assert np.isclose(np.linalg.norm(normal), 1, atol=1e-15)
+ return normal
+
+ # def normal_2_trend_plunge(self, normal):
+ #
+ # plunge = round(degrees(-np.arcsin(normal[2])))
+ # if normal[1] > 0:
+ # trend = round(degrees(np.arctan(normal[0] / normal[1]))) + 360
+ # else:
+ # trend = round(degrees(np.arctan(normal[0] / normal[1]))) + 270
+ #
+ # if trend > 360:
+ # trend = trend - 360
+ #
+ # assert trend == self.trend
+ # assert plunge == self.plunge
+
+
+# class Position:
+# def __init__(self):
+
+
+
+Interval = Tuple[float, float]
+@attrs.define
+class PowerLawSize:
+ """
+ Truncated Power Law distribution for the fracture size 'r'.
+ The density function:
+
+ f(r) = f_0 r ** (-power - 1)
+
+ for 'r' in [size_min, size_max], zero elsewhere.
+
+ The class allows to set a different (usually reduced) sampling range for the fracture sizes,
+ one can either use `set_sample_range` to directly set the sampling range or just increase the lower bound to meet
+ prescribed fracture intensity via the `set_range_by_intansity` method.
+
+ """
+ power = attrs.field(type=float)
+ # power of th power law
+ diam_range = attrs.field(type=Interval)
+ # lower and upper bound of the power law for the fracture diameter (size), values for which the intensity is given
+ intensity = attrs.field(type=float)
+ # number of fractures with size in the size_range per unit volume (denoted as P30 in SKB reports)
+
+ sample_range = attrs.field(type=Interval)
+ # range used for sampling., not part of the statistical description
+
+ # default attrs initiaizer:
+ @sample_range.default
+ def copy_full_range(self):
+ return list(self.diam_range).copy() # need copy to preserve original range
+
+ @classmethod
+ def from_mean_area(cls, power, diam_range, p32, p32_power=None):
+ """
+ Construct the distribution using the mean arrea (P32) instead of intensity.
+ :param power: power law exponent
+ :param dim_range: size range for which p32 mean area is given
+ :param p32: mean area of the fractures in given `diam_range`.
+ :param p32_power: if the mean area is given for different power parameter.
+ :return: PowerLawSize instance.
+ """
+ if p32_power is None:
+ p32_power = power
+ intensity = cls.intensity_for_mean_area(p32, power, diam_range, p32_exp=p32_power)
+ return cls(power, diam_range, intensity)
+
+ def cdf(self, x, range):
+ """
+ Power law distribution function for the given support interval (min, max).
+ """
+ min, max = range
+ pmin = min ** (-self.power)
+ pmax = max ** (-self.power)
+ return (pmin - x ** (-self.power)) / (pmin - pmax)
+
+ def ppf(self, x, range):
+ """
+ Power law quantile (inverse distribution) function for the given support interval (min, max).
+ """
+ min, max = range
+ pmin = min ** (-self.power)
+ pmax = max ** (-self.power)
+ scaled = pmin - x * (pmin - pmax)
+ return scaled ** (-1 / self.power)
+
+ def range_intensity(self, range):
+ """
+ Computes the fracture intensity (P30) for different given fracture size range.
+ :param range: (min, max) - new fracture size range
+ """
+ a, b = self.diam_range
+ c, d = range
+ k = self.power
+ return self.intensity * (c ** (-k) - d ** (-k)) / (a ** (-k) - b ** (-k))
+
+ def set_sample_range(self, sample_range=None):
+ """
+ Set the range for the fracture sampling.
+ :param sample_range: (min, max), None to reset to the full range.
+ DEPRECATED Use extract_range for the functional API.
+ """
+ if sample_range is None:
+ sample_range = self.diam_range
+ self.sample_range = list(sample_range).copy()
+
+ def extract_range(self, sample_range):
+ return PowerLawSize(
+ self.power,
+ self.diam_range,
+ self.intensity,
+ sample_range = sample_range)
+
+ def _range_for_intensity(self, intensity, i_bound=0):
+ a, b = self.diam_range
+ c, d = self.sample_range
+ k = self.power
+ if i_bound == 0:
+ lower_bound = (intensity * (a ** (-k) - b ** (-k)) / self.intensity + d ** (-k)) ** (-1 / k)
+ return (lower_bound, self.sample_range[1])
+ else:
+ upper_bound = (c ** (-k) - intensity * (a ** (-k) - b ** (-k)) / self.intensity ) ** (-1 / k)
+ return (self.sample_range[0], upper_bound)
+
+
+ def set_lower_bound_by_intensity(self, intensity):
+ """
+ Increase lower fracture size bound of the sample range in order to achieve target fracture intensity.
+ DEPRECATED
+ """
+ self.sample_range = self._range_for_intensity(intensity, i_bound=0)
+
+ def set_upper_bound_by_intensity(self, intensity):
+ """
+ Increase lower fracture size bound of the sample range in order to achieve target fracture intensity.
+ DEPRECATED
+ """
+ self.sample_range = self._range_for_intensity(intensity, i_bound=1)
+
+ def mean_size(self, volume=1.0):
+ """
+ :return: Mean number of fractures for given volume
+ """
+ sample_intensity = self.range_intensity(self.sample_range)
+ return sample_intensity * volume
+
+ def sample(self, volume, size=None, force_nonempty=False):
+ """
+ Sample the fracture diameters.
+ :param volume: By default the volume and fracture sample intensity is used to determine actual number of the fractures.
+ :param size: ... alternatively the prescribed number of fractures can be generated.
+ :param force_nonempty: If True at leas one fracture is generated.
+ :return: Array of fracture sizes.
+ """
+ if size is None:
+ size = np.random.poisson(lam=self.mean_size(volume), size=1)
+ if force_nonempty:
+ size = max(1, size)
+ #print("PowerLaw sample: ", force_nonempty, size)
+ U = np.random.uniform(0, 1, int(size))
+ return self.ppf(U, self.sample_range)
+
+ def mean_area(self, volume=1.0, shape_area=1.0):
+ """
+ Compute mean fracture surface area from current sample range intensity.
+ :param shape_area: Area of the unit fracture shape (1 for square, 'pi/4' for disc)
+ :return:
+ """
+ sample_intensity = volume * self.range_intensity(self.sample_range)
+ a, b = self.sample_range
+ exp = self.power
+ integral_area = (b ** (2 - exp) - a ** (2 - exp)) / (2 - exp)
+ integral_intensity = (b ** (-exp) - a ** (-exp)) / -exp
+ p_32 = sample_intensity / integral_intensity * integral_area * shape_area
+ return p_32
+
+ @staticmethod
+ def intensity_for_mean_area(p_32, exp, size_range, shape_area=1.0, p32_exp=None):
+ """
+ Compute fracture intensity from the mean fracture surface area per unit volume.
+ :param p_32: mean fracture surface area
+ :param exp: power law exponent
+ :param size_range: fracture size range
+ :param shape_area: Area of the unit fracture shape (1 for square, 'pi/4' for disc)
+ :param p32_exp: possibly different value of the power parameter for which p_32 mean area is given
+ :return: p30 - fracture intensity
+
+ TODO: modify to general recalculation for two different powers and introduce separate wrapper functions
+ for p32 to p30, p32 to p20, etc. Need to design suitable construction methods.
+ """
+ if p32_exp is None:
+ p32_exp = exp
+ a, b = size_range
+ integral_area = (b ** (2 - p32_exp) - a ** (2 - p32_exp)) / (2 - p32_exp)
+ integral_intensity = (b ** (-exp) - a ** (-exp)) / -exp
+ return p_32 / integral_area / shape_area * integral_intensity
+
+
+# @attr.s(auto_attribs=True)
+# class PoissonIntensity:
+# p32: float
+# # number of fractures
+# size_min: float
+# #
+# size_max:
+# def sample(self, box_min, box_max):
+
+@attrs.define
+class UniformBoxPosition:
+ dimensions = attrs.field(type=List[float], converter=np.array)
+ center= attrs.field(type=List[float], converter=np.array, default=np.zeros(3))
+ # TODO: default center should be dimensions / 2 !! see DIFF
+
+ def sample(self, size=1):
+ # size = 1
+ # pos = np.empty((size, 3), dtype=float)
+ # for i in range(3):
+ # pos[:, i] = np.random.uniform(self.center[i] - self.dimensions[i]/2, self.center[i] + self.dimensions[i]/2, size)
+ pos = np.empty(3, dtype=float)
+ return (np.random.random([size, 3]) - 0.5) * self.dimensions[None, :] + self.center[None, :]
+
+ @property
+ def volume(self):
+ return np.prod(self.dimensions)
+
+@attrs.define
+class ConnectedPosition:
+ """
+ Generate a fracture positions in such way, that all fractures are connected to some of the initial surfaces.
+ Sampling algorithm:
+ 0. sampling position of the i-th fracture:
+ 1. select random surface using theoretical frequencies of the fractures:
+ f_k = N_k / (N_f - k), with N_k ~ S_k, S_k is the area of k-th surface
+ ... this is done by taking a random number from (0, sum f_k) and determining 'k'
+ by search in the array of cumulative frequencies (use dynarray package).
+ 2. one point of the N_k points in k-th surface
+ 3. center of the new fracture such, that it contains the selected point
+
+ N_k is obtained as:
+ 1. generate N_p * S_i points
+ 2. remove points that are close to some existing points on other fractures
+
+ Possible improvements:
+ Instead of grouping points according to fractures, make groups of points according to some volume cells.
+ This way one can obtain more uniform distribution over given volume.
+ """
+
+ confining_box: List[float]
+ # dimensions of the confining box (center in origin)
+ point_density: float
+ # number of points per unit square
+
+ # List of fractures, fracture is the transformation matrix (4,3) to transform from the local UVW coordinates to the global coordinates XYZ.
+ # Fracture in UvW: U=(-1,1), V=(-1,1), W=0.
+
+ all_points: List[np.array] = []
+ # all points on surfaces
+ surf_points: List[int] = []
+ # len = n surfaces + 1 - start of fracture's points in all_points, last entry is number of all points
+ surf_cum_freq: List[float] = []
+
+ # len = n surfaces + 1 - cumulative mean frequencies for surfaces; total_freq - the last entry is surf_cum_freq
+ # used for efficient sampling of the parent fracture index
+
+ @classmethod
+ def init_surfaces(cls, confining_box, n_fractures, point_density, points):
+ """
+ :param confinign_box: dimensions of axis aligned box, points out of this box are excluded.
+ :param point_density: number of points per unit square
+ :param points: List of 3d points on the virtual initial surface.
+ :return:
+ """
+ np = len(points)
+ freq = np / (n_fractures - 0)
+ return cls(confining_box, point_density, points.copy(), [0, np], [0, freq])
+
+ # TODO continue
+ def sample(self, diameter, axis, angle, shape_angle):
+ """
+ Sample position of the fracture with given shape and orientation.
+ :return:
+ sampling position of the i-th fracture:
+ 1. select random surface using theoretical frequencies of the fractures:
+ f_k = N_k / (N_f - k), with N_k ~ S_k, S_k is the area of k-th surface
+ ... this is done by taking a random number from (0, sum f_k) and determining 'k'
+ by search in the array of cumulative frequencies (use dynarray package).
+ 2. one point of the N_k points in k-th surface
+ 3. center of the new fracture such, that it contains the selected point
+
+ N_k is obtained as:
+ 1. generate N_p * S_i points
+ 2. remove points that are close to some existing points on other fractures
+
+ """
+
+ if len(self.fractures) == 0:
+ self.confining_box = np.array(self.confining_box)
+ # fill by box sides
+ self.points = np.empty((0, 3))
+ for fr_mat in self.boxes_to_fractures(self.init_boxes):
+ self.add_fracture(fr_mat)
+ # assert len(self.fractures) == len(self.surfaces)
+
+ q = np.random.uniform(-1, 1, size=3)
+ q[2] = 0
+ uvq_vec = np.array([[1, 0, 0], [0, 1, 0], q])
+ uvq_vec *= diameter / 2
+ uvq_vec = FisherOrientation.rotate(uvq_vec, np.array([0, 0, 1]), shape_angle)
+ uvq_vec = FisherOrientation.rotate(uvq_vec, axis, angle)
+
+ # choose the fracture to prolongate
+ i_point = np.random.randint(0, len(self.points), size=1)[0]
+ center = self.points[i_point] + uvq_vec[2, :]
+ self.add_fracture(self.make_fracture(center, uvq_vec[0, :], uvq_vec[1, :]))
+ return center
+
+ def add_fracture(self, fr_mat):
+ i_fr = len(self.fractures)
+ self.fractures.append(fr_mat)
+ surf = np.linalg.norm(fr_mat[:, 2])
+
+ points_density = 0.01
+ # mean number of points per unit square meter
+ points_mean_dist = 1 / np.sqrt(points_density)
+ n_points = np.random.poisson(lam=surf * points_density, size=1)
+ uv = np.random.uniform(-1, 1, size=(2, n_points[0]))
+ fr_points = fr_mat[:, 0:2] @ uv + fr_mat[:, 3][:, None]
+ fr_points = fr_points.T
+ new_points = []
+
+ for pt in fr_points:
+ # if len(self.points) >0:
+ dists_short = np.linalg.norm(self.points[:, :] - pt[None, :], axis=1) < points_mean_dist
+ # else:
+ # dists_short = []
+ if np.any(dists_short):
+ # substitute current point for a choosed close points
+ i_short = np.random.choice(np.arange(len(dists_short))[dists_short])
+ self.points[i_short] = pt
+ # self.point_fracture = i_fr
+ else:
+ # add new points that are in the confining box
+ if np.all((pt - self.confining_box / 2) < self.confining_box):
+ new_points.append(pt)
+ # self.point_fracture.append(i_fr)
+ if new_points:
+ self.points = np.concatenate((self.points, new_points), axis=0)
+
+ @classmethod
+ def boxes_to_fractures(cls, boxes):
+ fractures = []
+ for box in boxes:
+ box = np.array(box)
+ ax, ay, az, bx, by, bz = range(6)
+ sides = [[ax, ay, az, bx, ay, az, ax, ay, bz],
+ [ax, ay, az, ax, by, az, bx, ay, az],
+ [ax, ay, az, ax, ay, bz, ax, by, az],
+ [bx, by, bz, ax, by, bz, bx, by, az],
+ [bx, by, bz, bx, ay, bz, ax, by, bz],
+ [bx, by, bz, bx, by, az, bx, ay, bz]]
+ for side in sides:
+ v0 = box[side[0:3]]
+ v1 = box[side[3:6]]
+ v2 = box[side[6:9]]
+ fractures.append(cls.make_fracture(v0, v1 / 2, v2 / 2))
+ return fractures
+
+ @classmethod
+ def make_fracture(cls, center, u_vec, v_vec):
+ """
+ Construct transformation matrix from one square cornerthree square corners,
+ """
+ w_vec = np.cross(u_vec, v_vec)
+ return np.stack((u_vec, v_vec, w_vec, center), axis=1)
+
+
+FamilyCfg = Dict[str, Union[str, float, int]]
+PopulationDict = Dict[str, FamilyCfg]
+PopulationList = Dict[str, FamilyCfg] # Deprecated, list of Family cfg with "name" attribute
+PopulationCfg = Union[PopulationDict, Path, str]
+# Population configuration dict/list, or YAML or JASON input file
+
+
+@attrs.define
+class FrFamily:
+ """
+ Describes a single fracture family with defined distribution of:
+ - normal orientation
+ - shape orientation
+ - size orientation
+ - position distribution
+ - more complex correlation structure,
+ e.g. large fractures with independent orientations smaller with correlated orientations
+ needs more general sampling paradigm
+ """
+ orientation: FisherOrientation
+ size: PowerLawSize
+ shape_angle: VonMisesOrientation
+
+ name: Optional[str] = None
+ #position:
+ #correlation: None
+
+
+ @classmethod
+ def from_cfg(cls, family: FamilyCfg, name='') -> 'FrFamily':
+ trend = family.get("trend", None)
+ plunge = family.get("plunge", None)
+ if trend is None or plunge is None:
+ # use strike & dip instead
+ try:
+ trend = family.get("strike") + 90
+ plunge = 90 - family.get("dip")
+ except KeyError as e:
+ print("Uncomplete fracture family configuration. Use trend+plunge or strike+dip keys.")
+ raise e
+
+ fisher_orientation = FisherOrientation(trend, plunge, family["concentration"])
+ size_range = (family["r_min"], family["r_max"])
+ if "p_32" in family:
+ power_law_size = PowerLawSize.from_mean_area(family["power"], size_range, family["p_32"])
+ elif "p_30" in family:
+ power_law_size = PowerLawSize(family["power"], size_range, family["p_30"])
+ else:
+ raise KeyError("Missing p_32 or p_30 key in FrFamily config dictionary.")
+ assert np.isclose(family["p_32"], power_law_size.mean_area())
+ shape_angle = VonMisesOrientation(trend=0, concentration=0)
+ return cls(fisher_orientation, power_law_size, shape_angle, name=name)
+
+ @staticmethod
+ def project_cfg(family: FamilyCfg, plane_normal=[0,0,1]):
+ """
+
+ :param family:
+ :param plane_normal:
+ :return:
+ """
+ assert False, "Not implemented yet"
+ # Idea is to have specific dict key "2d_angle" that allows to differentiate
+ # 3d and 2d configurations.
+
+ orientation = FisherOrientation(0, 90, np.inf)
+ size_range = (family["r_min"], family["r_max"])
+ power_law_size = PowerLawSize.from_mean_area(family["power"], size_range, family["p_32"])
+ assert np.isclose(family["p_32"], power_law_size.mean_area())
+ shape_angle = VonMisesOrientation(family["trend"], family["concentration"])
+ return FrFamily(family["name"], orientation, power_law_size, shape_angle)
+
+ def with_size_range(self, size_range):
+ """
+ Copy of the family with modified fracture size range.
+ :param size_range:
+ :return:
+ """
+ return FrFamily(self.orientation, self.size.extract_range(size_range), self.shape_angle, self.name)
+
+ def sample(self, position_distribution, shape=fr_set.RectangleShape(), i_fam=0, force_size:int=None):
+ """
+ Generate FractureSet sample from the FrFamily.
+ :param position_distribution:
+ :param shape:
+ :param i_fam:
+ :return:
+ TODO: include position distribution into FrFamily, apply different domains as with change in sample_size
+ but rahter keep both separated from distributions and keep them common to all families at the population level.
+ Pass them down when sampling of computing size estimates.
+ TODO: add distribution of aspect (log normal with mean 1 and given log_10 sigma)
+ """
+ radii = self.size.sample(position_distribution.volume, size=force_size)
+ aspect = 1.0
+ radii = np.stack( (radii, aspect * radii), axis=1 )
+ n_fractures = len(radii)
+ shape_angle = self.shape_angle.sample_angle(size=n_fractures)
+ shape_axis = np.stack((np.cos(shape_angle), np.sin(shape_angle)), axis=1)
+ return fr_set.FractureSet(
+ base_shape_idx=shape.id,
+ radius=radii,
+ normal=self.orientation.sample_normal(size=n_fractures),
+ center=position_distribution.sample(size=n_fractures),
+ shape_axis=shape_axis,
+ family=np.full(n_fractures, i_fam)
+ )
+
+
+
+
+@attrs.define
+class Population:
+ """
+ Data class to describe whole population of fractures, several families.
+ Supports sampling across the families.
+ """
+ # Attributes
+ families: List[FrFamily]
+ # families list
+ domain: Tuple[float, float, float]
+ # dimensions of the box domain, the Z dimension is = 0 for 2d population
+ shape: fr_set.BaseShape = fr_set.RectangleShape()
+ # Reference Shape of generated fractures
+
+ __loaders = {
+ '.json': json.load,
+ '.yaml': yaml.safe_load
+ }
+
+ @property
+ def volume(self):
+ return np.product([l if l>0 else 1.0 for l in self.domain])
+
+ @staticmethod
+ def project_list_to_2d(families: PopulationDict, plane_normal=[0, 0, 1]):
+ """
+ Convert families as dicts into 2d.
+ :return:
+ """
+ return {k:FrFamily.project_cfg(v, plane_normal) for k,v in families.items()}
+
+ @classmethod
+ def from_cfg(cls, families: PopulationDict, box, shape=fr_set.RectangleShape()):
+ """
+ Load families from a list of dict, with keywords: [ name, trend, plunge, concentration, power, r_min, r_max, p_32 ]
+ Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
+ """
+ if isinstance(families, (str, Path)):
+ # Load from file.
+ path = Path(families)
+ with open(path) as f:
+ fam_cfg = cls.__loaders[path.suffix](f)
+ else:
+ fam_cfg = families
+ if isinstance(fam_cfg, dict):
+ families = [FrFamily.from_cfg(family, name=family_key) for family_key, family in fam_cfg.items()]
+ elif isinstance(fam_cfg, list):
+ families = [FrFamily.from_cfg(family, name=family['name']) for family in fam_cfg]
+ else:
+ raise TypeError("Families (possibly loaded from provied file path) must be either dictionary or list of dictionaries with the 'name' item.")
+
+ return cls(families, box, shape)
+
+ # @classmethod
+ # def initialize_2d(cls, families: List[Dict[str, Any]], box):
+ # """
+ # Load families from a list of dict, with keywords: [ name, trend, plunge, concentration, power, r_min, r_max, p_32 ]
+ # Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
+ # :param families json_file: JSON file with families data
+ # """
+ # families = [FrFamily.from_cfg_2d(family) for family in families]
+ # assert len(box) == 3 and sum((l > 0 for l in box)) == 2
+ # return cls(box, families)
+
+ @classmethod
+ def from_json(cls, json_file, box) -> 'Population':
+ """
+ Load families from a JSON file. Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
+ :param json_file: JSON file with families data
+ DEPRECATED use from_cfg
+ """
+ return cls.from_cfg(json_file, box)
+
+ @classmethod
+ def init_from_yaml(cls, yaml_file:str, box) -> 'Population':
+ """
+ Load families from a YAML file. Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
+ :param json_file: YAML file with families data
+ DEPRECATED use from _cfg
+ """
+ return cls.from_cfg(yaml_file, box)
+
+
+ def mean_size(self):
+ """
+ Mean number of fractures for the set sample range.
+ :return:
+
+ """
+ sizes = [family.size.mean_size(self.volume) for family in self.families]
+ return sum(sizes)
+
+ def set_range_from_size(self, sample_size):
+ """
+ :param sample_size:
+ :return: Population with new common fracture range.
+ """
+ return self.set_sample_range(self.common_range_for_sample_size(sample_size))
+
+ def set_sample_range(self, sample_range):
+ """
+ Set sample range for fracture diameter.
+ :param sample_range: (min_bound, max_bound) - one of these can be None if 'sample_size' is provided
+ this bound is set to match mean number of fractures
+ #:param sample_size: If provided, the None bound is changed to achieve given mean number of fractures.
+ # If neither of the bounds is None, the lower one is reset.
+ # DEPRECATED. Use self.set_sample_range(self.coommon_range_for_sample_size(target_size)
+ :return: Population with new common fracture range.
+ """
+ families = [fam.with_size_range(sample_range) for fam in self.families]
+ return Population(families, self.domain, self.shape)
+ # min_size, max_size = sample_range
+ # for f in self.families:
+ # r_min, r_max = f.size.diam_range
+ # if min_size is not None:
+ # r_min = min_size
+ # if max_size is not None:
+ # r_max = max_size
+ # f.size.set_sample_range((r_min, r_max))
+ # if sample_size is not None:
+ # family_sizes = [family.size.mean_size(self.volume) for family in self.families]
+ # total_size = np.sum(family_sizes)
+ #
+ # if max_size is None:
+ # for f, size in zip(self.families, family_sizes):
+ # family_intensity = size / total_size * sample_size / self.volume
+ # f.size.set_upper_bound_by_intensity(family_intensity)
+ # else:
+ # for f, size in zip(self.families, family_sizes):
+ # family_intensity = size / total_size * sample_size / self.volume
+ # f.size.set_lower_bound_by_intensity(family_intensity)
+
+ def common_range_for_sample_size(self, sample_size=None, free_bound=0, initial_range=None) -> Interval:
+ """
+ Compute common size range accross families for given mean sample size.
+ :param sample_size: Target mean number of fractures in the population. Sum of mean sample sizes over families.
+ If None, current mean size is used, so we only compute common size range that preserve same mean sample size.
+
+ Setting the common family bound to obtain prescribed sample size is a nonlinear
+ yet monotone problem. Therefore, we apply simple iterating strategy to find correct bound.
+ 1. split the total sample_size according to current range family intensities
+ 2. compute common new bound for each family, set the common bound as the median of these
+ 3. continue to 1. if the estimated number of samples match prescribed sample size with error up to 1.
+ :param free_bound index of bound (0-lower, 1-upper) to adapt.
+ :param initial_range - initial range of the iterative algorithm; median of family sample ranges by default
+ """
+ if sample_size is None:
+ sample_size = self.mean_size()
+ target_total_intenzity = sample_size / self.volume
+ if initial_range is None:
+ fam_ranges = np.array([f.size.sample_range for f in self.families])
+ initial_range = np.median(fam_ranges, axis=0)
+ common_range = initial_range
+
+ fn_fam_intensities = lambda range: [f.size.range_intensity(range) for f in self.families]
+ def fn_update_ranges(intensities):
+ rel_total_intensity = target_total_intenzity / sum(intensities)
+ return [f.size._range_for_intensity(intensity * rel_total_intensity, i_bound=free_bound)
+ for f, intensity in zip(self.families, intensities)]
+
+ intensities = fn_fam_intensities(common_range)
+ while (sum(intensities) - target_total_intenzity) * self.volume > 1:
+ update_ranges = fn_update_ranges(intensities)
+ common_range = np.median(update_ranges, axis=0)
+ intensities = fn_fam_intensities(common_range)
+ return common_range
+
+ def extract_size_range(self, range: Interval) -> 'Population':
+ """
+ Copy of population with modified size distribution set to prescribed sample range.
+ :return:
+ """
+ families = [
+ FrFamily(fam.orientation, fam.size.extract_range(range), fam.shape_angle, fam.name)
+ for fam in self.families
+ ]
+ return Population(
+ families=families,
+ domain=self.domain,
+ shape=self.shape
+ )
+
+
+ def sample(self, pos_distr=None, keep_nonempty=False) -> fr_set.FractureSet:
+ """
+ Provide a single fracture set sample from the population.
+ :param pos_distr: Fracture position distribution, common to all families.
+ An object with method .sample(size) returning array of positions (size, 3).
+ :return: List of FractureShapes.
+ TODO: move position distribution into FrFamily for consistency
+ TODO: set sample size and seed here, both optional
+ """
+ if pos_distr is None:
+ pos_distr = UniformBoxPosition(self.domain)
+ fr_fam_sets = [fam.sample(pos_distr, self.shape, i_fam=i_fam) for i_fam, fam in enumerate(self.families)]
+ fracture_set = fr_set.FractureSet.merge(fr_fam_sets, population=self)
+ if keep_nonempty and len(fracture_set) == 0:
+ fam_probs = [fam.size.mean_size(1.0) for fam in self.families]
+ fam_probs = np.array(fam_probs) / np.sum(fam_probs)
+ sample = np.random.multinomial(1, fam_probs, size=1)[0] # Take the single sample.
+ i_family = np.argmax(sample)
+ fracture_set = self.families[i_family].sample(pos_distr, self.shape, i_fam = i_family, force_size=1)
+ fracture_set = fr_set.FractureSet.merge([fracture_set], population=self)
+ return fracture_set
+ #
+ # for ifam, fam in enumerate(self.families):
+ # fr_set.FractureSet(
+ # base_shapes=[self.shape],
+ # shape_idx=0,
+ # radius=fam.size.sample(self.volume),
+ # normal=fam.orientation.sample_normal(size=len(diams)),
+ # center=pos_distr.center(size=len(diams)),
+ # shape_axis=fam.shape_angle.sample_angle(len(diams))
+ # )
+ # #name = fam.name
+ # diams =
+ # fr_normals =
+ # #fr_axis_angle = f.orientation.sample_axis_angle(size=len(diams))
+ # shape_angle =
+ # #np.random.uniform(0, 2 * np.pi, len(diams))
+ # center =
+ #
+ # for r, normal, sa in zip(diams, fr_normals, shape_angle):
+ # #axis, angle = aa[:3], aa[3]
+ # center = pos_distr.sample()
+ # fractures.append(Fracture(
+ # shape_class=self.shape,
+ # r=r,
+ # center=center,
+ # normal=normal[None, :],
+ # shape_angle=sa,
+ # family=fam,
+ # aspect=1,
+ # id=name))
+ # return fractures
+
+
+
+#
+# class FractureGenerator:
+# def __init__(self, frac_type):
+# self.frac_type = frac_type
+#
+# def generate_fractures(self, min_distance, min_radius, max_radius):
+# fractures = []
+#
+# for i in range(self.frac_type.n_fractures):
+# x = uniform(2 * min_distance, 1 - 2 * min_distance)
+# y = uniform(2 * min_distance, 1 - 2 * min_distance)
+# z = uniform(2 * min_distance, 1 - 2 * min_distance)
+#
+# tpl = TPL(self.frac_type.kappa, self.frac_type.r_min, self.frac_type.r_max, self.frac_type.r_0)
+# r = tpl.rnd_number()
+#
+# orient = Orientation(self.frac_type.trend, self.frac_type.plunge, self.frac_type.k)
+# axis, angle = orient.compute_axis_angle()
+#
+# fd = FractureData(x, y, z, r, axis[0], axis[1], axis[2], angle, i * 100)
+#
+# fractures.append(fd)
+#
+# return fractures
+#
+# def write_fractures(self, fracture_data, file_name):
+# with open(file_name, "w") as writer:
+# for d in fracture_data:
+# writer.write("%f %f %f %f %f %f %f %f %d\n" % (d.centre[0], d.centre[1], d.centre[2], d.r, d.rotation_axis[0],
+# d.rotation_axis[1], d.rotation_axis[2], d.rotation_angle, d.tag))
+#
+# def read_fractures(self, file_name):
+# data = []
+# with open(file_name, "r") as reader:
+# for l in reader.readlines():
+# x, y, z, r, axis_0, axis_1, axis_2, angle = [float(i) for i in l.split(' ')[:-1]]
+# tag = int(l.split(' ')[-1])
+# d = FractureData(x, y, z, r, axis_0, axis_1, axis_2, angle, tag)
+# data.append(d)
+#
+# return data
+#
+
+
+
+
+
+
+
+# class Quat:
+# """
+# Simple quaternion class as numerically more stable alternative to the Orientation methods.
+# TODO: finish, test, substitute
+# """
+#
+# def __init__(self, q):
+# self.q = q
+#
+# def __matmul__(self, other: 'Quat') -> 'Quat':
+# """
+# Composition of rotations. Quaternion multiplication.
+# """
+# w1, x1, y1, z1 = self.q
+# w2, x2, y2, z2 = other.q
+# w = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2
+# x = w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2
+# y = w1 * y2 + y1 * w2 + z1 * x2 - x1 * z2
+# z = w1 * z2 + z1 * w2 + x1 * y2 - y1 * x2
+# return Quat((w, x, y, z))
+#
+# @staticmethod
+# def from_euler(a: float, b: float, c: float) -> 'Quat':
+# """
+# X-Y-Z Euler angles to quaternion
+# :param a: angle to rotate around Z
+# :param b: angle to rotate around X
+# :param c: angle to rotate around Z
+# :return: Quaterion for composed rotation.
+# """
+# return Quat([np.cos(a / 2), 0, 0, np.sin(a / 2)]) @ \
+# Quat([np.cos(b / 2), 0, np.sin(b / 2), 0]) @ \
+# Quat([np.cos(c / 2), np.sin(c / 2), 0, 0])
+#
+# def axisangle_to_q(self, v, theta):
+# # convert rotation given by axis 'v' and angle 'theta' to quaternion representation
+# v = v / np.linalg.norm(v)
+# x, y, z = v
+# theta /= 2
+# w = np.cos(theta)
+# x = x * np.sin(theta)
+# y = y * np.sin(theta)
+# z = z * np.sin(theta)
+# return w, x, y, z
+#
+# def q_to_axisangle(self, q):
+# # convert from quaternion to rotation given by axis and angle
+# w, v = q[0], q[1:]
+# theta = np.acos(w) * 2.0
+# return v / np.linalg.norm(v), theta
diff --git a/src/bgem/stochastic/fr_mesh.py b/src/bgem/stochastic/fr_mesh.py
new file mode 100644
index 0000000..8bb3b2d
--- /dev/null
+++ b/src/bgem/stochastic/fr_mesh.py
@@ -0,0 +1,127 @@
+"""
+Fracture set meshing support, should provide functions to crate a fractures shapes
+using GMSH or BrepWriter.
+- fracture set regularizations
+- cration of GMSH geometry entities using gmsh api
+- cration of BrepWriter entities
+"""
+import pathlib
+import numpy as np
+from typing import Union
+from bgem.bspline import brep_writer as bw
+from bgem.gmsh import gmsh
+
+
+def create_fractures_rectangles(gmsh_geom, fractures, base_shape: 'ObjectSet'):
+ """
+ DEPRECATED, use geometry_gmsh instead.
+ # From given fracture date list 'fractures'.
+ # transform the base_shape to fracture objects
+ # fragment fractures by their intersections
+ # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
+ """
+ assert False, "DEPRECATED, use geometry_gmsh(gmsh_geom, frectures) instead."
+ return None
+
+ # shapes = []
+ # for i, fr in enumerate(fractures):
+ # shape = base_shape.copy()
+ # print("fr: ", i, "tag: ", shape.dim_tags)
+ # shape = shape.scale([fr.rx, fr.ry, 1]) \
+ # .rotate(axis=fr.rotation_axis, angle=fr.rotation_angle) \
+ # .translate(fr.center) \
+ # .set_region(fr.region)
+ #
+ # shapes.append(shape)
+ #
+ # fracture_fragments = gmsh_geom.fragment(*shapes)
+ # return fracture_fragments
+
+
+# def create_fractures_polygons(gmsh_geom, fractures):
+# # From given fracture date list 'fractures'.
+# # transform the base_shape to fracture objects
+# # fragment fractures by their intersections
+# # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
+# frac_obj = fracture.Fractures(fractures)
+# frac_obj.snap_vertices_and_edges()
+# shapes = []
+# for fr, square in zip(fractures, frac_obj.squares):
+# shape = gmsh_geom.make_polygon(square).set_region(fr.region)
+# shapes.append(shape)
+#
+# fracture_fragments = gmsh_geom.fragment(*shapes)
+# return fracture_fragments
+
+
+def geometry_gmsh(fr_set, gmsh_geom: 'GeometryOCC'):
+ """
+
+ :param gmsh_geom:
+ :param fractures:
+ :param base_shape:
+ :param shift:
+ :return:
+ """
+ # From given fracture date list 'fractures'.
+ # transform the base_shape to fracture objects
+ # fragment fractures by their intersections
+ # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
+ if len(fr_set) == 0:
+ return []
+ base_shape = fr_set.base_shape.gmsh_base_shape(gmsh_geom)
+ shapes = []
+ region_map = {}
+ for i, fr in enumerate(fr_set):
+ shape = base_shape.copy()
+ #print("fr: ", i, "tag: ", shape.dim_tags)
+ region_name = f"fam_{fr.family}_{i:03d}"
+ shape = shape.scale([fr.rx, fr.ry, 1]) \
+ .rotate(axis=[0, 0, 1], angle=fr.shape_angle) \
+ .rotate(axis=fr.rotation_axis, angle=fr.rotation_angle) \
+ .translate(fr.center) \
+ .set_region(region_name)
+ region_map[region_name] = i
+ shapes.append(shape)
+
+ #fracture_fragments = gmsh_geom.fragment(*shapes)
+ fr_shapes = gmsh_geom.group(*shapes)
+ return fr_shapes, region_map
+
+
+def geometry_brep_writer(fr_set, brep_name: Union[str, pathlib.Path]):
+ """
+ Create the BREP file from a list of fractures using the brep writer interface.
+
+ Currently works only for 2D .
+ """
+ # fracture_mesh_step = geometry_dict['fracture_mesh_step']
+ # dimensions = geometry_dict["box_dimensions"]
+
+ #print("n fractures:", len(self))
+ if isinstance(brep_name, str):
+ brep_name = pathlib.Path(brep_name)
+ brep_name = brep_name.with_suffix(".brep")
+ faces = []
+ base_vertices = fr_set.base_shape.vertices(8)
+
+ # Legacy transform
+ fr_vtxs = lambda fr : fr.transform(base_vertices) # fr.center
+ fractures_vertices = np.array([fr_vtxs(fr) for fr in fr_set])
+
+ #fractures_vertices = self.transform_mat @ (base_vertices.T)[None, :, :] # (n_fr, 3, 3) @ (1, 3, n_points) -> (n_fr, 3, n_points)
+ #fractures_vertices = fractures_vertices.transpose((0, 2, 1))
+ #fractures_vertices = fractures_vertices + self.center[:, None, :] # (n_fr, 3, n_points) -> (n_fr, n_points, 3)
+
+
+ for i, fr_vertices in enumerate(fractures_vertices):
+ vtxs = [bw.Vertex(p) for p in fr_vertices]
+ edges = [bw.Edge(a, b) for a, b in zip(vtxs[:-1], vtxs[1:])]
+ edges.append(bw.Edge(vtxs[-1], vtxs[0]))
+ face = bw.Face(edges)
+ faces.append(face)
+
+ comp = bw.Compound(faces)
+ with open(brep_name, "w") as f:
+ bw.write_model(f, comp)
+ return brep_name
diff --git a/src/bgem/stochastic/fr_set.py b/src/bgem/stochastic/fr_set.py
index e69de29..842b36a 100644
--- a/src/bgem/stochastic/fr_set.py
+++ b/src/bgem/stochastic/fr_set.py
@@ -0,0 +1,1076 @@
+"""
+Module for representation and processing of a set of fractures, i.e. a single DFN sample.
+
+Should include:
+- Baseclasses for fracture shapes: EllipseShape, RectangleShape, PolygonShape
+- Representation of the single discrete fracture network sample: FractureSet
+ Use vectorized storage, but could extract single fractures for backward compatibility.
+- FractureField
+
+Class dedicated to fractire intersections, simplification, meshing.
+- creation of the BREP model from the list of fractures (deal with intersections)
+- fracture network manipulations and simplifications to allow good meshing
+
+TODO:
+1. Just collection of fractures.
+2. Fracture properties - conductivity model on a single fracture
+ - conductivity = alpha * r ** beta
+ - apperture = 12 * sqrtp(alpha * r**beta)
+
+ ... possible extensions to heterogenous models
+.. TO Be Done
+"""
+import pathlib
+from typing import *
+
+import attrs
+import math
+import numpy as np
+import numpy.typing as npt
+
+from bgem import fn
+from bgem.core import array_attr
+from bgem.bspline import brep_writer as bw
+
+def embed_to_3d(points_2d):
+ points_3d = np.concatenate((
+ points_2d,
+ np.zeros(len(points_2d))[:, None]
+ ), axis=1)
+ return points_3d
+
+"""
+Reference fracture shapes.
+Placed in XY plane and with isotropic shape.
+
+Different shapes should have the surface area same as the unit disc
+in order to be comparable in density (not necesarily in the connectivity).
+"""
+_shape_for_id = {
+}
+class BaseShape:
+ """
+ Abstract class.
+ All subclasses should represent shpapes with the area equal to 1.0.
+ TODO: check that aabb and unit area asre satisfied for individual shapes.
+ """
+
+ @staticmethod
+ def shape_for_id(id: int):
+ if id in _shape_for_id:
+ return _shape_for_id[id]
+ else:
+ return PolygonShape(id)
+
+ @property
+ def aabb(self):
+ """
+ Size of the bounding box for any rotation of the reference shape.
+ For an isotropic reference shape we have a bounding box (-D,+D) x (-D,+D)
+ This method provides conservative generic AABB. Shape specific implementation
+ could be provided.
+ :return: D - half of the box size
+ """
+ half_size = np.ones(2) * self.R
+ return np.stack([-half_size, half_size])
+
+ @staticmethod
+ def ellipses_aabb(self, centers, a_vectors, b_vectors):
+ """
+ Compute AABBs for ellipses given by `centers` and semiaxis vectors `a_vectors`, `b_vectors`.
+ Inputs shape: (n_fractures, 3)
+ Based on finding extremes of the parametric equations.
+ :return: AABBs array , shape (n_fracutres, 2, 3)
+ AABB of fracture i has min corner AABB[i, 0, :] and max corner AABB[i, 1, :]
+ """
+ # Number of fractures
+ n_fractures = centers.shape[0]
+
+ # Calculate theta values for critical points
+ theta = np.arctan2(b_vectors, a_vectors)
+
+ # Compute cos(theta) and sin(theta)
+ cos_theta = np.cos(theta)
+ sin_theta = np.sin(theta)
+
+ # Parametric equations evaluated at critical points
+ points_1 = cos_theta * a_vectors + sin_theta * b_vectors,
+ points_2 = -cos_theta * a_vectors - sin_theta * b_vectors
+ extrem_points = np.stack((
+ np.minimum(points_1, points_2),
+ np.maximum(points_1, points_2)),
+ axis=1
+ )
+
+ return extrem_points + centers[:, None, :]
+
+
+ def are_points_inside(self, points):
+ """
+ Virtual method.
+ :param points: shape (N, 2)
+ :return: logical array (N,)
+ """
+ pass
+
+ @staticmethod
+ def disc_approx(n_sides=8, scale=(1.0, 1.0)):
+ """
+ Return (n_sides, 3) array with coordinates of regular polygon
+ inscribed to the circle with radius 'scale'.
+ Z coordinate set to 0.
+ :param n_sides:
+ :param scale: radius of circle or two radii (r_x, r_y) of the ellipse
+ :return: array of (n_sides, 3) shape
+ """
+ scale = scale * np.ones(2)
+ angles = np.linspace(0, 2 * np.pi, n_sides, endpoint=False)
+ points = np.stack((
+ np.cos(angles) * scale[0],
+ np.sin(angles) * scale[1],
+ np.zeros_like(angles)
+ ), axis=1)
+ return points
+
+ def vertices(self, n_sides=None):
+ """
+ Return 3d coordinates of the fracture polygon or its approximation for ellipse.
+ :param n_sides: only used by ellipse implementation,
+ :return: ndarray (n_sides, 3)
+ """
+ points_3d = self.disc_approx(self.n_sides, scale=self.R)
+
+ return points_3d
+
+class LineShape(BaseShape):
+ """
+ Does not fit to 3D conceptually. Introduce carefully once 3D case API is properly designed and tested.
+ """
+ id = 2
+
+
+ def gmsh_base_shape(self, gmsh_geom: 'GeometryOCC'):
+ return gmsh_geom.line([-0.5, 0, 0], [0.5, 0, 0])
+
+_shape_for_id[LineShape.id] = LineShape()
+
+
+class EllipseShape(BaseShape):
+ """
+ Disc base fracture shape.
+ """
+
+ id = 0
+
+ def __init__(self):
+ self.n_sides = np.inf
+ self.R = 1 / math.sqrt(math.pi)
+ self.r = self.R
+ # Radius of the reference disc of unit area
+ self._scale_sqr = 1 / math.pi
+ # Faster identification of inside points.
+
+ def is_point_inside(self, x, y):
+ return x**2 + y**2 <= self._scale_sqr
+
+ def are_points_inside(self, points):
+ sq = points ** 2
+ return sq[:, 0] + sq[:, 1] <= self._scale_sqr
+
+ def gmsh_base_shape(self, gmsh_geom: 'GeometryOCC'):
+ return gmsh_geom.disc(rx=self.scale, ry=self.scale)
+
+ def vertices(self, n_sides=8):
+ """
+ Approximate `n_sides` polygon of the unit area.
+ :return: ndarray (n_sides, 3)
+ """
+ return PolygonShape(n_sides).vertices()
+
+_shape_for_id[EllipseShape.id] = EllipseShape()
+
+class RectangleShape(BaseShape):
+ """
+ Reference square shape with area 1.0 and center at origin.
+ """
+ id = 4
+
+ def __init__(self):
+ """
+ Initializes a RegularPolygon instance for an N-sided polygon.
+
+ Args:
+ - N: Number of sides of the regular polygon.
+ """
+ # Square with area of unit disc.
+ self.R = 1/math.sqrt(2)
+ # Radius of circumcircle for square of unit area.
+ # S = 4 * sin(45)* cos(45) * R^2 = 4 * 1/2 * 1/sqrt(2)^2 = 1
+ self.r = 0.5
+ # Radius of inscribed circle for square of unit area.
+ self.n_sides = 4
+
+ def is_point_inside(self, x, y):
+ """
+ Tests if a point (x, y) is inside the regular N-sided polygon.
+
+ Args:
+ - x, y: Coordinates of the point to test.
+
+ Returns:
+ - True if the point is inside the polygon, False otherwise.
+ """
+ return (abs(x) < self.r) and (abs(y) < self.r)
+
+ def are_points_inside(self, points):
+ """
+ Tests if points in a NumPy array are inside the regular N-sided polygon.
+ Args:
+ - points: A 2D NumPy array of shape (M, 2), where M is the number of points
+ and each row represents a point (x, y).
+ Returns:
+ - A boolean NumPy array where each element indicates whether the respective
+ point is inside the polygon.
+ """
+ return np.max(np.abs(points), axis=1) < self.r
+
+ def gmsh_base_shape(self, gmsh_geom: 'GeometryOCC'):
+ return gmsh_geom.rectangle()
+
+ def vertices(self, n_sides=8):
+ """
+ PolygonShape(4) provides square in diamond position.
+ We need edges parallel with axis to provide fast interior indicator.
+ :return: ndarray (n_sides, 3)
+ """
+ return self.r * np.array([[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]] )
+
+_shape_for_id[RectangleShape.id] = RectangleShape()
+#_shape_for_id[4] = _shape_for_id[RectangleShape.id]
+
+
+class PolygonShape(BaseShape):
+
+ def __init__(self, N):
+ """
+ Initializes a RegularPolygon instance for an N-sided polygon.
+
+ Area S = N * sin th/2 * cos th/2 * R^2 = 0.5 * N * sin th * R^2
+ Args:
+ - N: Number of sides of the regular polygon.
+ """
+ assert N > 4
+
+ self.n_sides = N
+ self.theta_segment_half = math.pi / N # half angle of each segment
+ self.cos_theta = math.cos(self.theta_segment_half)
+ self.R = 1 / math.sqrt(0.5 * N * math.sin(2 * self.theta_segment_half))
+ # Radius of circumcircle. For polygon of the unit area.
+ self.r = self.cos_theta * self.R
+ # Radius of inscribed circle for R=1
+
+ @property
+ def id(self):
+ return self.n_sides
+
+
+ def is_point_inside(self, x, y):
+ """
+ Tests if a point (x, y) is inside the regular N-sided polygon.
+
+ Args:
+ - x, y: Coordinates of the point to test.
+
+ Returns:
+ - True if the point is inside the polygon, False otherwise.
+ """
+ r = math.sqrt(x**2 + y**2) # Convert point to polar coordinates (radius)
+ theta = math.atan2(y, x) # Angle in polar coordinates
+
+ # Compute the reminder of the angle and the x coordinate of the reminder point
+ theta_reminder = theta % self.theta_segment_half
+ x_reminder = math.cos(theta_reminder) * r
+
+ # Check if the x coordinate of the reminder point is less than
+ # the radius of the inscribed circle (for R=1)
+ return x_reminder <= self.r
+
+ def are_points_inside(self, points):
+ """
+ Tests if points in a NumPy array are inside the regular N-sided polygon.
+ Args:
+ - points: A 2D NumPy array of shape (M, 2), where M is the number of points
+ and each row represents a point (x, y).
+ Returns:
+ - A boolean NumPy array where each element indicates whether the respective
+ point is inside the polygon.
+ """
+ r = np.sqrt(points[:, 0]**2 + points[:, 1]**2)
+ theta = np.arctan2(points[:, 1], points[:, 0])
+ theta_reminder = theta % self.theta_segment_half
+ x_reminder = np.cos(theta_reminder) * r
+ return x_reminder <= self.r
+
+ def gmsh_base_shape(self, gmsh_geom: 'GeometryOCC'):
+ """
+ Base shape for an N side polygon.
+ :param gmsh_geom:
+ :return:
+ """
+ points = self.disc_approx(n_sides=self.n_sides, scale=self.R)
+ return gmsh_geom.make_polygon(points)
+
+
+__base_shapes = [LineShape, EllipseShape, RectangleShape, PolygonShape]
+__shape_ids = {shape:i for i, shape in enumerate(__base_shapes)}
+
+# class LineShape:
+# """
+# Class represents the reference line 2D fracture shape.
+#
+# The polymorphic `make_approx` method is used to create polygon (approximation in case of disc) of the
+# actual fracture.
+# """
+# _points = np.array([[-0.5, 0, 0], [0.5, 0, 0]])
+#
+# @classmethod
+# def make_approx(cls, x_scale, y_scale, step=None):
+# xy_scale = np.array([x_scale, y_scale, 1.0])
+# return cls._points[:, :] * xy_scale[None, :]
+
+
+# class SquareShape(LineShape):
+# """
+# Class represents the square fracture shape.
+# """
+# _points = np.array([[-0.5, -0.5, 0], [0.5, -0.5, 0], [0.5, 0.5, 0], [-0.5, 0.5, 0]])
+
+
+# class DiscShape:
+# """
+# Class represents the square fracture shape.
+# """
+#
+# @classmethod
+# def make_approx(cls, x_scale, y_scale, step=1.0):
+# n_sides = np.pi * min(x_scale, y_scale) / step
+# n_sides = max(4, n_sides)
+# angles = np.linspace(0, 2 * np.pi, n_sides, endpoint=False)
+# points = np.stack(np.cos(angles) * x_scale, np.sin(angles) * y_scale, np.ones_like(angles))
+# return points
+
+
+
+
+def normal_to_axis_angle(normal): ## todo
+ """
+
+ """
+ z_axis = np.array([0, 0, 1], dtype=float)
+ norms = normal / np.linalg.norm(normal)
+ cos_angle = np.dot(norms, z_axis)
+ angle = np.arccos(cos_angle)
+ # sin_angle = np.sqrt(1-cos_angle**2)
+
+ axis = np.cross(z_axis, norms)
+ ax_norm = np.linalg.norm(axis)
+ if ax_norm < 1e-13:
+ axis = np.array([1, 0, 0])
+ else:
+ axis = axis / ax_norm
+ #return axes, angles
+ return axis, angle
+
+def normals_to_axis_angles(normals): ## todo
+ """
+
+ """
+ z_axis = np.array([0, 0, 1], dtype=float)
+ norms = normals / np.linalg.norm(normals, axis=1)[:, None]
+ cos_angle = norms @ z_axis
+ angles = np.arccos(cos_angle)
+ # sin_angle = np.sqrt(1-cos_angle**2)
+
+ axes = np.cross(z_axis, norms, axisb=1)
+ ax_norm = np.maximum(np.linalg.norm(axes, axis=1), 1e-200)
+ axes = axes / ax_norm[:, None]
+ #return axes, angles
+ return np.concatenate([axes, angles[:, None]], axis=1)
+
+
+def rotate(vectors, axis=None, angle=0.0, axis_angle=None):
+ """
+ Rotate given vector around given 'axis' by the 'angle'.
+ :param vectors: array of 3d vectors, shape (n, 3)
+ :param axis_angle: pass both as array (4,)
+ :return: shape (n, 3)
+ """
+ if axis_angle is not None:
+ axis, angle = axis_angle[:3], axis_angle[3]
+ if angle == 0:
+ return vectors
+ vectors = np.atleast_2d(vectors)
+ cos_angle, sin_angle = np.cos(angle), np.sin(angle)
+ rotated = vectors * cos_angle \
+ + np.cross(axis, vectors, axisb=1) * sin_angle \
+ + axis[None, :] * (vectors @ axis)[:, None] * (1 - cos_angle)
+ # Rodrigues formula for rotation of vectors around axis by an angle
+ return rotated
+
+
+
+
+
+
+
+
+@attrs.define
+class Fracture:
+ """
+ Single fracture sample.
+ TODO: modify to the acessor into the FrSet objects.
+ """
+ shape_idx: int
+ # Basic fracture shape idx.
+ radius: Tuple[float, float] = attrs.field(converter=lambda v: (float(v[0]), float(v[1])) if hasattr(v, "__getitem__") else (float(v), float(v)))
+
+ # Fracture diameter, laying in XY plane
+ center: np.array
+ # location of the barycentre of the fracture
+ normal: np.array
+ # fracture normal
+ shape_axis: np.array = attrs.field(
+ converter=lambda v: (float(v[0]), float(v[1])) if hasattr(v, "__getitem__") else (np.cos(v), np.sin(v)),
+ default = np.array([1, 0]))
+ # angle to rotate the unit shape around z-axis; rotate anti-clockwise
+ #region_id: int # Union[str, int] = "fracture"
+ # Family index in population. Could be used to identify group of fractures even for population = None
+ family: int = None
+ # Original family, None if created manually (in tests)
+ #aspect: float = 1
+ # aspect ratio of the fracture = y_length / x_length where x_length == r
+ #id: Any = None
+ # any value associated with the fracture (DEPRECATED should be replaced by
+ # FrValue class and fr_mesh code
+ population: 'Population' = None
+
+
+ _rotation_axis: np.array = attrs.field(init=False, default=None)
+ # axis of rotation
+ _rotation_angle: float = attrs.field(init=False, default=None)
+ # angle of rotation around the axis (?? counterclockwise with axis pointing up)
+ _distance: float = attrs.field(init=False, default=None)
+ # absolute term in plane equation
+ _plane_coor_system: np.array = attrs.field(init=False, default=None)
+ # local coordinate system
+ _vertices: np.array = attrs.field(init=False, default=None)
+ # coordinates of the vertices
+ _ref_vertices: np.array = attrs.field(init=False, default=None)
+ # local coordinates of the vertices (xy - plane)
+
+ @property
+ def shape(self):
+ return BaseShape.shape_for_id(self.shape_idx)
+
+ @property
+ def r(self):
+ return self.radius[0]
+
+ @property
+ def aspect(self):
+ return self.radius[1] / self.radius[0]
+
+ @property
+ def shape_angle(self):
+ angle = np.arctan2(self.shape_axis[1], self.shape_axis[0])
+ return angle
+
+ @property
+ def vertices(self):
+ if self._vertices is None:
+ _vertices = self.transform(self.ref_vertices)
+ return _vertices
+
+ @property
+ def ref_vertices(self):
+ if self._ref_vertices is None:
+ n_approx_sides = 8
+ _ref_vertices = self.shape.vertices(n_approx_sides)
+ return _ref_vertices
+
+ @property
+ def rx(self):
+ return self.radius[0]
+
+ @property
+ def ry(self):
+ return self.radius[1]
+
+ @property
+ def scale(self):
+ return np.array([self.r, self.r * self.aspect])
+
+ @property
+ def rotation_angle(self):
+ if self._rotation_angle is None:
+ _rotation_axis, _rotation_angle = self.axis_angle()
+ return _rotation_angle
+
+ @property
+ def rotation_axis(self):
+ if self._rotation_axis is None:
+ _rotation_axis, _rotation_angle = self.axis_angle()
+ return _rotation_axis
+
+ def axis_angle(self):
+ axis, angle = normal_to_axis_angle(self.normal)
+ return axis, angle
+
+ def axis_angles(self):
+ axis_angle = normals_to_axis_angles([self.normal])[0,:]
+ _rotation_axis = axis_angle[0:3]
+ _rotation_angle = axis_angle[3]
+ return _rotation_axis, _rotation_angle
+
+ @property
+ def distance(self):
+ if self._distance is None:
+ _distance = -np.dot(self.center, self.normal)
+ return _distance
+
+ @property
+ def plane_coor_system(self):
+ if self._plane_coor_system is None:
+ _plane_coor_system = self.transform(np.array([[1.0, 0, 0], [0, 1.0 ,0]]))
+ return _plane_coor_system
+
+ def get_angle_with_respect_normal(self,vec):
+
+ dot = self.normal[0] * vec[0] + self.normal[1] * vec[1] + self.normal[2] * vec[2]
+ angle = np.arccos((dot)/np.linalg(vec))
+
+ return angle
+
+ def internal_point_2d(self, points):
+ """
+ Determines the interior points of the fracture.
+ :param points: array (3,n)
+ :return:
+ polygon_points as list of int: indices od of the interior points in points
+ """
+ polygon_points = []
+ for i in range(0,points.shape[0]):
+ #eps = abs(self.normal[0,0] * points[i,0] + self.normal[0,1] * points[i,1] + self.normal[0,2] * points[i,2] - self.distance)\
+ # / math.sqrt(np.linalg.norm(self.normal)**2 + self.distance**2 )
+ #if eps < 1e-15:
+ # continue
+
+ dot = np.zeros((self.ref_vertices.shape[0]))
+ for j in range(-1, self.ref_vertices.shape[0]-1):
+ bound_vec = self.ref_vertices[j+1] - self.ref_vertices[j]
+ sec_vec = self.ref_vertices[j+1] - points[i,:]
+ dot[j+1] = bound_vec[0]*sec_vec[0] + bound_vec[1]*sec_vec[1] + bound_vec[2]*sec_vec[2]
+
+ if np.sum(dot>0) == self.ref_vertices.shape[0] or np.sum(dot<0) == self.ref_vertices.shape[0]:
+ polygon_points.append(i)
+
+ #if polygon_points == []:
+ # polygon_points = None
+
+ return polygon_points
+
+ def dist_from_plane(self,point):
+ """
+ Computes distance from plane
+ :param point: array (3,)
+ :return: distance as double
+ """
+
+ dist = self.normal[0] * point[0] + self.normal[1] * point[1] + self.normal[2] * point[2] + self.distance
+ return dist
+
+
+ def get_isec_with_line(self, x_0, loc_direct):
+ """
+ Computes intersection of the fracture and line x0 + t*loc_direct (in local coordinates).
+ :param x0: array (3,)
+ :param loc_direct: array (3,)
+ :return:
+ x_isec as list of array (3,): real intersection points
+ x_isec_false as list of array (3,): intersection points outside the edge of the fracture
+ x_isec_start_vert_ind as list of int: index of the nearest initial point of the false intersection point
+ """
+
+ x_isec = []
+ x_isec_false = []
+ x_isec_start_vert_ind = []
+
+ n_approx_sides = 8
+ shape_points = self.shape.vertices(n_approx_sides)
+ bound_vec = np.zeros(shape_points.shape)
+ x_0_b = np.zeros(shape_points.shape)
+
+ aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float) # 0.5 *
+ points = shape_points #* aspect[None, :] # self.shape_class._points
+
+
+
+ col2 = loc_direct
+ for i in range(0, shape_points.shape[0] - 1):
+ col1 = points[i] - points[i-1]
+ rhs = (x_0 - points[i-1])[0]
+ det = col1[0] * col2[1] - col1[1] * col2[0]
+ det_x1 = rhs[0] * col2[1] - rhs[1] * col2[0]
+ #colinear intersections (joins) should be solved in a different way
+ if abs(det) > 0:
+ t = det_x1/det
+ if (t >= 0.0) and (t <= 1.0):
+ x_isec.append(x_0_b[i] + col1 * t)
+ else:
+ x_isec_false.append(x_0_b[i] + col1 * t)
+ if (t < 0.0):
+ x_isec_start_vert_ind.append(i-1)
+ elif (t > 1.0):
+ x_isec_start_vert_ind.append(i)
+ else:
+ if (i - 1) not in x_isec_start_vert_ind:
+ x_isec_start_vert_ind.append(i-1)
+ x_isec_false.append([])
+ if (i) not in x_isec_start_vert_ind:
+ x_isec_start_vert_ind.append(i)
+ x_isec_false.append([])
+
+ return x_isec, x_isec_false, x_isec_start_vert_ind
+
+ def transform(self, points):
+ """
+ Map local points on the fracture to the 3d scene.
+ :param points: array (n, 3)
+ :return: transformed points
+ """
+ aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
+ t_points= points * aspect[None, :] #[:, :]
+ #points[:, :] *= aspect[:,None]
+ t_points = rotate(t_points, np.array([0, 0, 1]), self.shape_angle)
+ t_points = rotate(t_points, self.rotation_axis, self.rotation_angle)
+ t_points += self.center[None, :]
+ return t_points
+
+ def back_transform(self, points):
+ """
+ Map points from 3d scene into local coordinate system.
+ :param points: array (n, 3)
+ :return: transformed points
+ """
+ aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
+ t_points = points - self.center[None, :]
+ t_points = rotate(t_points, self.rotation_axis, -self.rotation_angle)
+ t_points = rotate(t_points, np.array([0, 0, 1]), -self.shape_angle)
+ t_points /= aspect[None, :]
+ return t_points
+
+
+ def transform_clear(self, points):
+ """
+ Map local points on the fracture to the 3d scene.
+ :param points: array (n, 3)
+ :return: transformed points
+ """
+ aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
+ t_points= points * aspect[None, :] #[:, :]
+ #points[:, :] *= aspect[:,None]
+ t_points = rotate(t_points, np.array([0, 0, 1]), self.shape_angle)
+ t_points = rotate(t_points, self.rotation_axis, self.rotation_angle)
+ #t_points += self.centre[None, :]
+ return t_points
+
+ def back_transform_clear(self, points):
+ """
+ Map points from 3d scene into local coordinate system.
+ :param points: array (n, 3)
+ :return: transformed points
+ """
+ aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
+ #t_points = points - self.centre[None, :]
+ t_points = rotate(points, self.rotation_axis, -self.rotation_angle)
+ t_points = rotate(t_points, np.array([0, 0, 1]), -self.shape_angle)
+ t_points /= aspect[None, :]
+ return t_points
+
+
+
+
+
+
+
+
+@attrs.define
+class FractureSet:
+ """
+ Interface to the array based storage for the fractures.
+ Has given outer box domain.
+
+ The 1D fractures in 2D are treated as 2D fractures in 3D but:
+ - centers have z=0
+ - normals have z=0
+ - shape_angle = 0
+ - shape is two point line segment= array_attr(shape=(-1, 2), dtype=np.double)
+ - r[0,:] = r[1,:]
+ """
+
+ #domain: NDArray[Shape['3'], Float] # Box given by (3,) shape array
+ #base_shapes : List[Any] # Unique reference shape classes
+
+ #shape_idx = array_attr(shape=(-1,), dtype=np.int32) # Base shape type index into 'base_shapes' list.
+ base_shape_idx = attrs.field(type=int) # keep fracture sets of common shape, that is far enough for practical applications
+ radius = array_attr(shape=(-1, 2), dtype=np.double) # shape (n_fractures, 2), X and Y scaling of the reference shape.
+ center = array_attr(shape=(-1, 3), dtype=np.double) # center (n_fractures, 3); translation of the reference shape to actual position
+ normal = array_attr(shape=(-1, 3), dtype=np.double) # fracture unit normal vectors.
+
+ shape_axis = array_attr(shape=(-1, 2), dtype=np.double) # X reference unit vector in XY plane (n_fractures, 2)
+ family = array_attr(shape=(-1,), dtype=np.int32) # index of the fracture family within population
+
+ population = attrs.field(type='Population', default=None) # Generating population. Gives meaning to fr family indices.
+
+ def __len__(self):
+ return self.radius.shape[0]
+
+ def __iter__(self):
+ for i in range(len(self)):
+ yield self[i]
+
+ @property
+ def base_shapes(self):
+ return self.__module__.__base_shapes
+
+ @property
+ def base_shape(self):
+ return BaseShape.shape_for_id(self.base_shape_idx)
+
+ @property
+ def _base_shape_area(self):
+ """
+ Array of areas of the unit/base shapes.
+ """
+ return np.array([shp.area for shp in self.base_shapes])
+
+ @fn.cached_property
+ def area(self):
+ """
+ Array of fracture areas
+ """
+ shape_factor = self._base_shape_area[self.base_shape_idx]
+ return shape_factor * np.prod(self.radius, axis=1)
+
+ @classmethod
+ def parallel_plates(cls, box, normal, shift=0):
+ """
+ Construct parallel fractures covering the box given normal and separation given by the normal length.
+ The optional shift parameter provides shift po the fracture grid with respect to the origin.
+ :param box:
+ :param normal:
+
+ :return:
+ """
+ box = np.array(box)
+ normal = np.array(normal)
+ diag = np.linalg.norm(box)
+ separation = np.linalg.norm(normal)
+ n_fr = int(diag / separation) + 1
+ shift = shift % separation
+ plates = [
+ Fracture(
+ RectangleShape.id,
+ radius=diag,
+ center=(i - n_fr // 2) * normal + shift * normal + box / 2.0,
+ normal=normal / separation,
+ shape_angle=0,
+ region_id=0)
+ for i in range(n_fr)
+ ]
+ return cls.from_list(plates)
+
+ @classmethod
+ def area_sorted(cls, other: 'FractureSet') -> 'FractureSet':
+ area_order = np.argsort(other.area)
+ return cls(
+ base_shape_idx=other.base_shape_idx,
+ radius=other.radius[area_order],
+ center=other.center[area_order],
+ normal=other.normal[area_order],
+ shape_axis=other.shape_axis[area_order],
+ family=other.family[area_order],
+ population=other.population
+ )
+
+ @classmethod
+ def from_list(cls, fr_list: List[Fracture]) -> 'FractureSet':
+ """
+ Construct a fracture set from a list of 'Fracture' objects.
+ :param fr_list:
+ :return:
+ TODO: deal with shape_idx
+ TODO: fix shape_axis
+ """
+ fr_attribute = lambda attr : [getattr(fr, attr) for fr in fr_list]
+ shape_idx_list = fr_attribute('shape_idx')
+ #base_shape = {}
+ #shape_idx_set = {si for si in shape_class_list}
+ assert len(set(shape_idx_list)) == 1
+ shape_idx = shape_idx_list[0]
+
+ shape_axis = fr_attribute('shape_axis')
+ shape_axis = np.array([[1, 0] if sa is None else sa for sa in shape_axis])
+ shape_axis = np.stack(shape_axis, axis=0)
+
+ family_idx = np.full(len(fr_list), 0)
+
+ return cls(
+ shape_idx,
+ radius=fr_attribute('radius'),
+ center=fr_attribute('center'),
+ normal=fr_attribute('normal'),
+ shape_axis=shape_axis,
+ family=family_idx
+ )
+
+ @staticmethod
+ def _concat_attr(fr_sets, attr, size=0):
+ con = np.concatenate([getattr(fs, attr) for fs in fr_sets])
+ if size:
+ assert con.shape[0] == size
+ return con
+
+ @classmethod
+ def merge(cls, fr_sets: List['FractureSet'], population=None) -> 'FractureSet':
+ shape_idx_set = {fs.base_shape_idx for fs in fr_sets}
+ assert len(shape_idx_set) == 1
+ shape_idx = list(shape_idx_set)[0]
+ ccat = lambda attr : cls._concat_attr(fr_sets, attr)
+ return cls(
+ shape_idx,
+ radius=ccat('radius'),
+ center=ccat('center'),
+ normal=ccat('normal'),
+ shape_axis=ccat('shape_axis'),
+ family=ccat('family'),
+ population = population
+ )
+
+
+ @fn.cached_property
+ def AABB(self):
+ """
+ Axis Aligned Bounding Box for each fracture.
+
+ See this blog: https://iquilezles.org/articles/diskbbox/
+ AABB of an ellipse with axes U, V with center at origin:
+ AABB[ax] = +/-sqrt( U[ax]**2 + V[ax]**2)
+
+ The For given normal the formula is:
+ AABB[ax] = +/- R * sqrt(1-N[ax]**2)
+
+ That is AABB of the cicumflex disc.
+ Tight AABB for a non 1:1 aspect ratio fracture
+ would be much more complicated.
+
+ :return: AABB corners as the array of shape: (N, 2, 3)
+ min corner: AABB[:, 0, :], max corner AABB[:, 1, :]
+
+ TODO: Do better! AABB is bounding, but not tight, gap is about 16%.
+ """
+ max_radii = np.max(self.radius, axis=1) * self.base_shape.R
+ corners = self.center[:,None, :] + np.stack([-max_radii, +max_radii], axis=1)[:, :, None]
+ return corners
+
+ @staticmethod
+ def rotation_matrix_from_normals(normal, shape_axis):
+ """
+ Compute rotation matrices for the fractures:
+ normal - shape (n_fr, 3); normal vectors of fractures, assumed normalized.
+ shape_axis - shape (n_fr, 2); the fracture shape is first rotated in XY plane by a X axis -> shape_axis rotation.
+
+
+ The Z- local axis is transformed to normal N (assumed unit).
+ The shape_axis S = [sx, sy, 0] must be rotated to SS by the rotation that rotates Z -> N.
+ Then SS is transformation of the local X axis.
+ The transformation of the Y axis is then computed by the corss product.
+
+ Let's compute S':
+ 1. Z -> N rotation unit axis K = [-Ny, Nx, 0] / Nxy
+ 2. K . S = Sx Ny - Sy Nx
+ 3. follow Rodriguez formula proof, split S into part parallel (p) with K and ortogonal (o) to K
+ Sp = (K.S) K
+ So = S - Sp
+ 4. In the plane perpendicular to K,
+ we have vertical component giving: cos(th) = Nz
+ and horizontal component giving sin(th) = Nxy = sqrt(Nx^2 + Ny^2)
+ 5. We rotate So by angle th:
+ SSo[z] = -|So| Nxy *sng(Nx)
+ SSo[x,y] = (So [x,y]) Nz
+ 6. SSp = Sp
+ 7. Sum:
+ SS = SSo + SSp :
+ SSx = Spx + (Nz)Sox
+ SSy = Spy + (Nz)Soy
+ SSz = -|So| Nxy *sng(Nx)
+ Finally, the third vector of the rotated bases is cross(N, S')
+ TODO: find a better vector representation of the rotations, allowing faster construction of the transfrom matrix
+ :return: Shape (N, 3, 3).
+ """
+
+ assert np.allclose(np.linalg.norm(normal, axis=1), 1)
+ Nxy = np.stack([-normal[:, 1], normal[:, 0]], axis=1)
+ norm_Nxy = np.linalg.norm(Nxy, axis=1)
+ K = Nxy / norm_Nxy[:, None]
+ arg_small = np.argwhere(norm_Nxy < 1e-13)[:, 0]
+ K[arg_small, :] = np.array([1, 0], dtype=float)
+ K_dot_S = shape_axis[:, None, :] @ K[:, :, None]
+ S_p = K_dot_S[:, 0, :] * K
+ S_o = shape_axis - S_p
+ cos_th = normal[:, 2:3]
+ SS_xy = S_p + cos_th * S_o
+ # ?? th is in (0, pi) -> sin(th) always positive
+ # pos_nx = np.logical_xor(N[:, 0] > 0, N[:, 2] < 0)
+ pos_nx = (normal[:, None, 0:2] @ shape_axis[:, :, None])[:, 0, 0] > 0
+ sin_th = norm_Nxy
+ sin_th[pos_nx] = - sin_th[pos_nx]
+ SS_z = np.linalg.norm(S_o, axis=1) * sin_th
+
+ # Construct the rotated X axis SS vector, shape (N, 3)
+ SS = np.concatenate([
+ SS_xy,
+ SS_z[:, None]
+ ], axis=1)
+ scaled_trans_x = SS # * self.radius[:, 0:1]
+ scaled_trans_y = np.cross(normal, SS, axis=1) # * self.radius[:, 1:2]
+ trans_z = normal
+ rot_mat = np.stack([scaled_trans_x, scaled_trans_y, trans_z], axis=2)
+ return rot_mat
+
+ @fn.cached_property
+ def rotation_mat(self):
+ """
+ The full transform involves 'self.center' as well:
+ ambient_space_points = self.center + scale_mat @ self.transform_mat @ local_fr_points[:, None, :]
+
+ :return:
+ """
+ return self.rotation_matrix_from_normals(self.normal, self.shape_axis)
+
+ @fn.cached_property
+ def transform_mat(self):
+ """
+ Rotate and scale matrices for the fractures. The full transform involves 'self.center' as well:
+ ambient_space_points = self.center + self.transform_mat @ local_fr_points[:, None, :]
+
+ The Z- local axis is transformed to normal N (assumed unit).
+ The shape_axis S = [sx, sy, 0] must be rotated to SS by the rotation that rotates Z -> N.
+ Then SS is transformation of the local X axis.
+ The transformation of the Y axis is then computed by the corss product.
+
+ Let's compute S':
+ 1. Z -> N rotation unit axis K = [-Ny, Nx, 0] / Nxy
+ 2. K . S = Sx Ny - Sy Nx
+ 3. follow Rodriguez formula proof, split S into part parallel (p) with K and ortogonal (o) to K
+ Sp = (K.S) K
+ So = S - Sp
+ 4. In the plane perpendicular to K,
+ we have vertical component giving: cos(th) = Nz
+ and horizontal component giving sin(th) = Nxy = sqrt(Nx^2 + Ny^2)
+ 5. We rotate So by angle th:
+ SSo[z] = -|So| Nxy *sng(Nx)
+ SSo[x,y] = (So [x,y]) Nz
+ 6. SSp = Sp
+ 7. Sum:
+ SS = SSo + SSp :
+ SSx = Spx + (Nz)Sox
+ SSy = Spy + (Nz)Soy
+ SSz = -|So| Nxy *sng(Nx)
+ Finally, the third vector of the rotated bases is cross(N, S')
+ TODO: find a better vector representation of the rotations, allowing faster construction of the transfrom matrix
+ :return: Shape (N, 3, 3).
+ """
+ trans_mat = (self.rotation_mat[:, :, :]).copy()
+ trans_mat[:, :, 0] *= (self.radius[:, 0])[:, None]
+ trans_mat[:, :, 1] *= (self.radius[:, 1])[:, None]
+ return trans_mat
+
+ @fn.cached_property
+ def inv_transform_mat(self):
+ """ TODO transpose of just rot mat, deal with scaling as well."""
+ inv_trans_mat = (self.rotation_mat[:, :, :].transpose((0, 2, 1))).copy()
+ inv_trans_mat[:, 0, :] /= (self.radius[:, 0])[:, None]
+ inv_trans_mat[:, 1, :] /= (self.radius[:, 1])[:, None]
+ return inv_trans_mat
+
+
+
+ def __getitem__(self, item):
+ family_idx = self.family[item]
+ return Fracture(
+ shape_idx=self.base_shape_idx,
+ radius=self.radius[item, :],
+ center=self.center[item, :],
+ normal=self.normal[item, :],
+ shape_axis=self.shape_axis[item, :],
+ family=family_idx,
+ population=self.population
+ )
+
+
+
+"""
+Following is work in progress. Don not use it.
+TODO: move to appropriate feature branch.
+"""
+
+@attrs.define
+class FractureValues:
+ """
+ A quantities on the fracture set, one value for each fracture, constant on the fracture.
+ """
+ fractures: FractureSet
+ values: npt.NDArray[np.double]
+
+"""
+Some Fracture Values Operations
+"""
+def fr_values_permeability():
+ pass
+
+
+class FractureMesh:
+ """
+ GMSH specific class for fracture mesh generation.
+ Mesh generation procedure:
+ 1. Bulk GMSH geometry, functional approach allowing association of some data with parts of geometry.
+ Functional approach: refer to the shapes through Entities: dim_id set + dict property -> PropertyValues;
+ ProperyValues: [value], {dim_id: value_idx}, works as dim_id -> value map (Shold exsits as a CompressedDict or SparseDict)
+
+ Operations: merge properties dicts, parent has priority Intersection - get A * B parent properties, optionaly get disable 'other' properties
+ cut - get parent properties
+ union - keep properties of A, B, intersection properties on A * B
+ 2. Create fractures and apply them to the bulk geometry. Can apply to given bulk shape or to the whole geometry.
+ Cut fractures at shape boundary. => Intersection map: fracture -> Entity(dim_id set)
+ 3. FractureMesh - various simplification and modification operations
+ FractureMap - attach entities and properties to mesh elements through gmsh_shape_ids (internaly)
+ Field ... could be defined with respect to fracture properties, access to element attached properties
+
+ Temporary solution until the functional GMSH approach would be implemented:
+ 1. Assign fracture region IDs to the geometry fracture shapes after fragmentation -> return from fracture application
+ 2. Create FractureMesh with element -> fracture properties (r, ...), should work as Fields having value only on fracture elements
+ FractureMehs should provide getters to fracture and bulk property fields:
+ bulk:
+ - center
+ - el_volume
+ - el_transform
+ - custom bulk declared properties
+ fracture (in adition):
+ - r
+ - normal
+ - fr_transform (for anisotropic properties on the reference fracture shape)
+ - fr_center
+ Fields could be constructed by first evaluate the bulk elements and then the fracture elements using fracture fields.
+ """
+
diff --git a/src/bgem/stochastic/frac_isec.py b/src/bgem/stochastic/frac_isec.py
index a914acb..b7d5ccb 100644
--- a/src/bgem/stochastic/frac_isec.py
+++ b/src/bgem/stochastic/frac_isec.py
@@ -114,15 +114,11 @@ def _get_isec_eqn(self):
normal_A = self.fracture_A.normal
normal_B = self.fracture_B.normal
- a_1 = normal_A[0, 0]
- a_2 = normal_A[0, 1]
- a_3 = normal_A[0, 2]
+ a_1, a_2, a_3 = normal_A
+ b_1, b_2, b_3 = normal_B
- b_1 = normal_B[0,0]
- b_2 = normal_B[0,1]
- b_3 = normal_B[0,2]
-
- self.direct_C = np.array([[a_2 * b_3 - a_3 * b_2, a_3 * b_1 - a_1 * b_3, a_1 * b_2 - a_2 * b_1 ]])
+ self.direct_C = np.array([a_2 * b_3 - a_3 * b_2, a_3 * b_1 - a_1 * b_3, a_1 * b_2 - a_2 * b_1 ])
+ assert np.allclose(self.direct_C, np.cross(normal_A, normal_B))
self.direct_C = self.direct_C/np.linalg.norm(self.direct_C)
# np.cross(normal_A,normal_B)#
# Unit direction vector of the intersection line.
@@ -131,23 +127,22 @@ def _get_isec_eqn(self):
b_4 = self.fracture_B.distance
# Distance terms of the normal equations
- # rhs = np.array([[-a_4, -b_4, 0]])
+ rhs = np.array([-a_4, -b_4, 0])
- c_1 = self.direct_C[0,0]
- c_2 = self.direct_C[0,1]
- c_3 = self.direct_C[0,2]
+ c_1, c_2, c_3 = self.direct_C
# Solving system with RHS using Crammer's rule.
x0 = a_4 * (b_3 * c_2 - b_2 * c_3) + b_4 * (a_2 * c_3 - a_3 * c_2)
y0 = b_4 * (a_3 * c_1 - a_1 * c_3) + a_4 * (b_1 * c_3 - b_3 * c_1)
z0 = a_4 * (b_2 * c_1 - b_1 * c_2) + b_4 * (a_1 * c_2 - a_2 * c_1)
- mat = np.array([normal_A[0,:].T,normal_B[0,:].T,self.direct_C[0,:].T])
+ mat = np.stack([normal_A, normal_B, self.direct_C], axis=0) # vectors in rows
dt = np.linalg.det(mat)
- self.x_0 = np.array([[x0, y0, z0]])/dt
-
+ self.x_0 = np.array([x0, y0, z0])/dt
+ x0 = np.linalg.solve(mat, rhs)
+ assert np.allclose(self.x_0, x0)
#testao = self.fracture_A.normal @ self.fracture_A.centre.T + self.fracture_A.distance
#testbo = self.fracture_B.normal @ self.fracture_B.centre.T + self.fracture_B.distance
#testa = self.fracture_A.normal @ self.x_0.T + self.fracture_A.distance
@@ -159,10 +154,10 @@ def _get_isec_eqn(self):
#self.loc_x0_B, self.loc_direct_C_B = self._transform_to_local(self.x_0,self.direct_C, self.fracture_B)
self.loc_x0_A = self.fracture_A.back_transform(self.x_0)
- self.loc_direct_C_A = self.fracture_A.back_transform_clear(self.direct_C)
+ self.loc_direct_C_A = self.fracture_A.back_transform_clear([self.direct_C])[0]
self.loc_x0_B = self.fracture_B.back_transform(self.x_0)
- self.loc_direct_C_B = self.fracture_B.back_transform_clear(self.direct_C)
+ self.loc_direct_C_B = self.fracture_B.back_transform_clear([self.direct_C])[0]
# def _transform_to_local(self,x0,direct,fracture):
# x0 -= fracture.centre
diff --git a/src/bgem/stochastic/fracture.py b/src/bgem/stochastic/fracture.py
index a915514..e69de29 100644
--- a/src/bgem/stochastic/fracture.py
+++ b/src/bgem/stochastic/fracture.py
@@ -1,1379 +0,0 @@
-"""
-Module for statistical description of the fracture networks.
-It provides appropriate statistical models as well as practical sampling methods.
-"""
-
-from typing import *
-import numpy as np
-#import attr
-import attrs
-import math
-import json
-
-
-class LineShape:
- """
- Class represents the line fracture shape.
- The polymorphic `make_approx` method is used to create polygon (approximation in case of disc) of the
- actual fracture.
- """
- _points = np.array([[-0.5, 0, 0], [0.5, 0, 0]])
-
- @classmethod
- def make_approx(cls, x_scale, y_scale, step=None):
- xy_scale = np.array([x_scale, y_scale, 1.0])
- return cls._points[:, :] * xy_scale[None, :]
-
-
-class SquareShape(LineShape):
- """
- Class represents the square fracture shape.
- """
- _points = np.array([[-0.5, -0.5, 0], [0.5, -0.5, 0], [0.5, 0.5, 0], [-0.5, 0.5, 0]])
-
-
-class DiscShape:
- """
- Class represents the square fracture shape.
- """
-
- @classmethod
- def make_approx(cls, x_scale, y_scale, step=1.0):
- n_sides = np.pi * min(x_scale, y_scale) / step
- n_sides = max(4, n_sides)
- angles = np.linspace(0, 2 * np.pi, n_sides, endpoint=False)
- points = np.stack(np.cos(angles) * x_scale, np.sin(angles) * y_scale, np.ones_like(angles))
- return points
-
-class ConvexPolygon:
- """
- Class represents the convex polygon shape.
- """
-
- @classmethod
- def make_approx(cls, x_scale, y_scale, step=1.0):
- n_sides = np.pi * min(x_scale, y_scale) / step
- n_sides = max(4, n_sides)
- angles = np.linspace(0, 2 * np.pi, n_sides, endpoint=False)
- points = np.stack(np.cos(angles) * x_scale, np.sin(angles) * y_scale, np.ones_like(angles))
- return points
-
-
-@attrs.define
-class Fracture:
- """
- Single fracture sample.
- """
- shape_class: Any
- # Basic fracture shape.
- r: float
- # Fracture diameter, laying in XY plane
- center: np.array
- # location of the barycentre of the fracture
- normal: np.array
- # fracture normal
- shape_angle: float
- # angle to rotate the unit shape around z-axis; rotate anti-clockwise
- region_id: int # Union[str, int] = "fracture"
- # name or ID of the physical group
- i_family: int
- # index of the group
- aspect: float = 1
- # aspect ratio of the fracture = y_length / x_length where x_length == r
- region: Any = None
- # auxiliary, TODO: Separate generation of fractures and fracture shapes.
- _rotation_axis: np.array = attrs.field(init=False, default=None)
- # axis of rotation
- _rotation_angle: float = attrs.field(init=False, default=None)
- # angle of rotation around the axis (?? counterclockwise with axis pointing up)
- _distance: float = attrs.field(init=False, default=None)
- # absolute term in plane equation
- _plane_coor_system: np.array = attrs.field(init=False, default=None)
- # local coordinate system
- _vertices: np.array = attrs.field(init=False, default=None)
- # coordinates of the vertices
- _ref_vertices: np.array = attrs.field(init=False, default=None)
- # local coordinates of the vertices (xy - plane)
-
-
- @property
- def vertices(self):
- if self._vertices is None:
- _vertices = self.transform(self.shape_class._points)
- return _vertices
-
- @property
- def ref_vertices(self):
- if self._ref_vertices is None:
- _ref_vertices = self.shape_class._points
- return _ref_vertices
-
- @property
- def rx(self):
- return self.r
-
- @property
- def ry(self):
- return self.r * self.aspect
-
- @property
- def scale(self):
- return [self.r, self.r * self.aspect]
-
- @property
- def rotation_angle(self):
- if self._rotation_angle is None:
- _rotation_axis, _rotation_angle = self.axis_angle()
- return _rotation_angle
-
- @property
- def rotation_axis(self):
- if self._rotation_axis is None:
- _rotation_axis, _rotation_angle = self.axis_angle()
- return _rotation_axis
-
- def axis_angle(self):
- axis_angle = normal_to_axis_angle(self.normal)[0,:]
- _rotation_axis = axis_angle[0:3]
- _rotation_angle = axis_angle[3]
- return _rotation_axis, _rotation_angle
-
- @property
- def distance(self):
- if self._distance is None:
- _distance = -np.dot(self.center, self.normal[0, :])
- return _distance
-
- @property
- def plane_coor_system(self):
- if self._plane_coor_system is None:
- _plane_coor_system = self.transform(np.array([[1.0, 0, 0], [0, 1.0 ,0]]))
- return _plane_coor_system
-
- def get_angle_with_respect_normal(self,vec):
-
- dot = self.normal[0] * vec[0] + self.normal[1] * vec[1] + self.normal[2] * vec[2]
- angle = np.arccos((dot)/np.linalg(vec))
-
- return angle
-
- def internal_point_2d(self, points):
- """
- Determines the interior points of the fracture.
- :param points: array (3,n)
- :return:
- polygon_points as list of int: indices od of the interior points in points
- """
- polygon_points = []
- for i in range(0,points.shape[0]):
- #eps = abs(self.normal[0,0] * points[i,0] + self.normal[0,1] * points[i,1] + self.normal[0,2] * points[i,2] - self.distance)\
- # / math.sqrt(np.linalg.norm(self.normal)**2 + self.distance**2 )
- #if eps < 1e-15:
- # continue
-
- dot = np.zeros((self.ref_vertices.shape[0]))
- for j in range(-1, self.ref_vertices.shape[0]-1):
- bound_vec = self.ref_vertices[j+1] - self.ref_vertices[j]
- sec_vec = self.ref_vertices[j+1] - points[i,:]
- dot[j+1] = bound_vec[0]*sec_vec[0] + bound_vec[1]*sec_vec[1] + bound_vec[2]*sec_vec[2]
-
- if np.sum(dot>0) == self.ref_vertices.shape[0] or np.sum(dot<0) == self.ref_vertices.shape[0]:
- polygon_points.append(i)
-
- #if polygon_points == []:
- # polygon_points = None
-
- return polygon_points
-
- def dist_from_plane(self,point):
- """
- Computes distance from plane
- :param point: array (3,)
- :return: distance as double
- """
-
- dist = self.normal[0,0] * point[0] + self.normal[0,1] * point[1] + self.normal[0,2] * point[2] + self.distance
- return dist
-
-
- def get_isec_with_line(self, x_0, loc_direct):
- """
- Computes intersection of the fracture and line x0 + t*loc_direct (in local coordinates).
- :param x0: array (3,)
- :param loc_direct: array (3,)
- :return:
- x_isec as list of array (3,): real intersection points
- x_isec_false as list of array (3,): intersection points outside the edge of the fracture
- x_isec_start_vert_ind as list of int: index of the nearest initial point of the false intersection point
- """
-
- x_isec = []
- x_isec_false = []
- x_isec_start_vert_ind = []
-
- bound_vec = np.zeros(self.shape_class._points.shape)
- x_0_b = np.zeros(self.shape_class._points.shape)
-
- aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float) # 0.5 *
- points = self.shape_class._points #* aspect[None, :] # self.shape_class._points
-
-
-
- col2 = loc_direct[0]
- for i in range(0, self.shape_class._points.shape[0]-1):
- col1 = points[i] - points[i-1]
- rhs = (x_0 - points[i-1])[0]
- det = col1[0] * col2[1] - col1[1] * col2[0]
- det_x1 = rhs[0] * col2[1] - rhs[1] * col2[0]
- #colinear intersections (joins) should be solved in a different way
- if abs(det) > 0:
- t = det_x1/det
- if (t >= 0.0) and (t <= 1.0):
- x_isec.append(x_0_b[i] + col1 * t)
- else:
- x_isec_false.append(x_0_b[i] + col1 * t)
- if (t < 0.0):
- x_isec_start_vert_ind.append(i-1)
- elif (t > 1.0):
- x_isec_start_vert_ind.append(i)
- else:
- if (i - 1) not in x_isec_start_vert_ind:
- x_isec_start_vert_ind.append(i-1)
- x_isec_false.append([])
- if (i) not in x_isec_start_vert_ind:
- x_isec_start_vert_ind.append(i)
- x_isec_false.append([])
-
- return x_isec, x_isec_false, x_isec_start_vert_ind
-
- def transform(self, points):
- """
- Map local points on the fracture to the 3d scene.
- :param points: array (n, 3)
- :return: transformed points
- """
- aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
- t_points= points * aspect[None, :] #[:, :]
- #points[:, :] *= aspect[:,None]
- t_points = rotate(t_points, np.array([0, 0, 1]), self.shape_angle)
- t_points = rotate(t_points, self.rotation_axis, self.rotation_angle)
- t_points += self.center[None, :]
- return t_points
-
- def back_transform(self, points):
- """
- Map points from 3d scene into local coordinate system.
- :param points: array (n, 3)
- :return: transformed points
- """
- aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
- t_points = points - self.center[None, :]
- t_points = rotate(t_points, self.rotation_axis, -self.rotation_angle)
- t_points = rotate(t_points, np.array([0, 0, 1]), -self.shape_angle)
- t_points /= aspect[None, :]
- return t_points
-
-
- def transform_clear(self, points):
- """
- Map local points on the fracture to the 3d scene.
- :param points: array (n, 3)
- :return: transformed points
- """
- aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
- t_points= points * aspect[None, :] #[:, :]
- #points[:, :] *= aspect[:,None]
- t_points = rotate(t_points, np.array([0, 0, 1]), self.shape_angle)
- t_points = rotate(t_points, self.rotation_axis, self.rotation_angle)
- #t_points += self.centre[None, :]
- return t_points
-
- def back_transform_clear(self, points):
- """
- Map points from 3d scene into local coordinate system.
- :param points: array (n, 3)
- :return: transformed points
- """
- aspect = np.array([self.r, self.aspect * self.r, 1], dtype=float)
- #t_points = points - self.centre[None, :]
- t_points = rotate(points, self.rotation_axis, -self.rotation_angle)
- t_points = rotate(t_points, np.array([0, 0, 1]), -self.shape_angle)
- t_points /= aspect[None, :]
- return t_points
-
-def normal_to_axis_angle(normal): ## todo
- """
-
- """
- z_axis = np.array([0, 0, 1], dtype=float)
- norms = normal / np.linalg.norm(normal, axis=1)[:, None]
- cos_angle = norms @ z_axis
- angles = np.arccos(cos_angle)
- # sin_angle = np.sqrt(1-cos_angle**2)
-
- axes = np.cross(z_axis, norms, axisb=1)
- ax_norm = np.maximum(np.linalg.norm(axes, axis=1), 1e-200)
- axes = axes / ax_norm[:, None]
- #return axes, angles
- return np.concatenate([axes, angles[:, None]], axis=1)
-
-def rotate(vectors, axis=None, angle=0.0, axis_angle=None):
- """
- Rotate given vector around given 'axis' by the 'angle'.
- :param vectors: array of 3d vectors, shape (n, 3)
- :param axis_angle: pass both as array (4,)
- :return: shape (n, 3)
- """
- if axis_angle is not None:
- axis, angle = axis_angle[:3], axis_angle[3]
- if angle == 0:
- return vectors
- vectors = np.atleast_2d(vectors)
- cos_angle, sin_angle = np.cos(angle), np.sin(angle)
- rotated = vectors * cos_angle \
- + np.cross(axis, vectors, axisb=1) * sin_angle \
- + axis[None, :] * (vectors @ axis)[:, None] * (1 - cos_angle)
- # Rodrigues formula for rotation of vectors around axis by an angle
- return rotated
-
-
-
-
-
-@attrs.define
-class VonMisesOrientation:
- """
- Distribution for random orientation in 2d.
- X = east, Y = north
- """
-
- trend: float = 0
- # azimuth (0, 360) of the fractures normal
- concentration: float = 0
- # concentration parameter, 0 = uniformely dispersed, 1 = exect orientation
-
- def sample_axis_angle(self, size=1):
- """
- Sample fracture orientation angles.
- :param size: Number of samples
- :return: shape (n, 4), every row: unit axis vector and angle
- """
- axis_angle = np.tile(np.array([0, 0, 1, 0], dtype=float), size).reshape((size, 4))
- axis_angle[:, 3] = self.sample_angle(size)
- return axis_angle
-
-
- def sample_angle(self, size=1):
- trend = np.radians(self.trend)
- if self.concentration > np.log(np.finfo(float).max):
- return trend + np.zeros(size)
- else:
- if self.concentration == 0:
- return np.random.uniform(size=size) * 2 * np.pi
- else:
- return np.random.vonmises(mu=trend, kappa=self.concentration, size=size)
-
- def sample_normal(self, size=1):
- """
- Draw samples for the fracture normals.
- :param size: number of samples
- :return: array (n, 3)
- """
- angle = self.sample_angle(size)
- return np.stack([np.cos(angle), np.sin(angle), np.zeros_like(angle)], axis=1)
-
-
-@attrs.define
-class FisherOrientation:
- """
- Distribution for random orientation in 3d.
-
- Coordinate system: X - east, Y - north, Z - up
-
- strike, dip - used for the orientation of the planar geological features
- trend, plunge - used for the orientation of the line geological features
-
- As the distribution is considerd as distribution of the fracture normal vectors we use
- trend, plunge as the primal parameters.
- """
-
- trend: float
- # mean fracture normal (pointing down = negative Z)
- # azimuth (0, 360) of the normal's projection to the horizontal plane
- # related term is the strike = trend - 90; that is azimuth of the strike line
- # - the intersection of the fracture with the horizontal plane
- plunge: float
- # mean fracture normal (pointing down = = negative Z)
- # angle (0, 90) between the normal and the horizontal plane
- # related term is the dip = 90 - plunge; that is the angle between the fracture and the horizontal plane
- #
- # strike and dip can by understood as the first two Eulerian angles.
- concentration: float
- # the concentration parameter; 0 = uniform dispersion, infty - no dispersion
-
- @staticmethod
- def strike_dip(strike, dip, concentration):
- """
- Initialize from (strike, dip, concentration)
- """
- return FisherOrientation(strike + 90, 90 - dip, concentration)
-
- def _sample_standard_fisher(self, n) -> np.array:
- """
- Normal vector of random fractures with mean direction (0,0,1).
- :param n:
- :return: array of normals (n, 3)
- """
- if self.concentration > np.log(np.finfo(float).max):
- normals = np.zeros((n, 3))
- normals[:, 2] = 1.0
- else:
- unif = np.random.uniform(size=n)
- psi = 2 * np.pi * np.random.uniform(size=n)
- cos_psi = np.cos(psi)
- sin_psi = np.sin(psi)
- if self.concentration == 0:
- cos_theta = 1 - 2 * unif
- else:
- exp_k = np.exp(self.concentration)
- exp_ik = 1 / exp_k
- cos_theta = np.log(exp_k - unif * (exp_k - exp_ik)) / self.concentration
- sin_theta = np.sqrt(1 - cos_theta ** 2)
- # theta = 0 for the up direction, theta = pi for the down direction
- normals = np.stack((sin_psi * sin_theta, cos_psi * sin_theta, cos_theta), axis=1)
- return normals
-
- def sample_normal(self, size=1):
- """
- Draw samples for the fracture normals.
- :param size: number of samples
- :return: array (n, 3)
- """
- raw_normals = self._sample_standard_fisher(size)
- mean_norm = self._mean_normal()
- axis_angle = normal_to_axis_angle(mean_norm[None, :])
- return rotate(raw_normals, axis_angle=axis_angle[0])
-
-
- #def sample_axis_angle(self, size=1):
- # """
- # Sample fracture orientation angles.
- # :param size: Number of samples
- # :return: shape (n, 4), every row: unit axis vector and angle
- # """
- # normals = self._sample_normal(size)
- # return self.normal_to_axis_angle(normals[:])
-
-
- def _mean_normal(self):
- trend = np.radians(self.trend)
- plunge = np.radians(self.plunge)
- normal = np.array([np.sin(trend) * np.cos(plunge),
- np.cos(trend) * np.cos(plunge),
- -np.sin(plunge)])
-
- # assert np.isclose(np.linalg.norm(normal), 1, atol=1e-15)
- return normal
-
- # def normal_2_trend_plunge(self, normal):
- #
- # plunge = round(degrees(-np.arcsin(normal[2])))
- # if normal[1] > 0:
- # trend = round(degrees(np.arctan(normal[0] / normal[1]))) + 360
- # else:
- # trend = round(degrees(np.arctan(normal[0] / normal[1]))) + 270
- #
- # if trend > 360:
- # trend = trend - 360
- #
- # assert trend == self.trend
- # assert plunge == self.plunge
-
-
-# class Position:
-# def __init__(self):
-
-
-
-
-@attrs.define
-class PowerLawSize:
- """
- Truncated Power Law distribution for the fracture size 'r'.
- The density function:
-
- f(r) = f_0 r ** (-power - 1)
-
- for 'r' in [size_min, size_max], zero elsewhere.
-
- The class allows to set a different (usually reduced) sampling range for the fracture sizes,
- one can either use `set_sample_range` to directly set the sampling range or just increase the lower bound to meet
- prescribed fracture intensity via the `set_range_by_intansity` method.
-
- """
- power: float
- # power of th power law
- diam_range: (float, float)
- # lower and upper bound of the power law for the fracture diameter (size), values for which the intensity is given
- intensity: float
- # number of fractures with size in the size_range per unit volume (denoted as P30 in SKB reports)
-
- sample_range: (float, float) = attrs.field(init=False)
- # range used for sampling., not part of the statistical description
- # default initiaizer:
- @sample_range.default
- def copy_full_range(self):
- return list(self.diam_range).copy() # need copy to preserve original range
-
- @classmethod
- def from_mean_area(cls, power, diam_range, p32, p32_power=None):
- """
- Construct the distribution using the mean arrea (P32) instead of intensity.
- :param p32: mean area of the fractures in given `diam_range`.
- :param p32_power: if the mean area is given for different power parameter.
- :return: PowerLawSize instance.
- """
- if p32_power is None:
- p32_power = power
- return cls(power, diam_range, cls.intensity_for_mean_area(p32, power, diam_range, p32_exp=p32_power))
-
- def cdf(self, x, range):
- """
- Power law distribution function for the given support interval (min, max).
- """
- min, max = range
- pmin = min ** (-self.power)
- pmax = max ** (-self.power)
- return (pmin - x ** (-self.power)) / (pmin - pmax)
-
- def ppf(self, x, range):
- """
- Power law quantile (inverse distribution) function for the given support interval (min, max).
- """
- min, max = range
- pmin = min ** (-self.power)
- pmax = max ** (-self.power)
- scaled = pmin - x * (pmin - pmax)
- return scaled ** (-1 / self.power)
-
- def range_intensity(self, range):
- """
- Computes the fracture intensity (P30) for different given fracture size range.
- :param range: (min, max) - new fracture size range
- """
- a, b = self.diam_range
- c, d = range
- k = self.power
- return self.intensity * (c ** (-k) - d ** (-k)) / (a ** (-k) - b ** (-k))
-
- def set_sample_range(self, sample_range=None):
- """
- Set the range for the fracture sampling.
- :param sample_range: (min, max), None to reset to the full range.
- """
- if sample_range is None:
- sample_range = self.diam_range
- self.sample_range = list(sample_range).copy()
-
- def set_lower_bound_by_intensity(self, intensity):
- """
- Increase lower fracture size bound of the sample range in order to achieve target fracture intensity.
- """
- a, b = self.diam_range
- c, d = self.sample_range
- k = self.power
- lower_bound = (intensity * (a ** (-k) - b ** (-k)) / self.intensity + d ** (-k)) ** (-1 / k)
- self.sample_range[0] = lower_bound
-
- def set_upper_bound_by_intensity(self, intensity):
- """
- Increase lower fracture size bound of the sample range in order to achieve target fracture intensity.
- """
- a, b = self.diam_range
- c, d = self.sample_range
- k = self.power
- upper_bound = (c ** (-k) - intensity * (a ** (-k) - b ** (-k)) / self.intensity ) ** (-1 / k)
- self.sample_range[1] = upper_bound
-
-
- def mean_size(self, volume=1.0):
- """
- :return: Mean number of fractures for given volume
- """
- sample_intensity = self.range_intensity(self.sample_range)
- return sample_intensity * volume
-
- def sample(self, volume, size=None, force_nonempty=False):
- """
- Sample the fracture diameters.
- :param volume: By default the volume and fracture sample intensity is used to determine actual number of the fractures.
- :param size: ... alternatively the prescribed number of fractures can be generated.
- :param force_nonempty: If True at leas one fracture is generated.
- :return: Array of fracture sizes.
- """
- if size is None:
- size = np.random.poisson(lam=self.mean_size(volume), size=1)
- if force_nonempty:
- size = max(1, size)
- #print("PowerLaw sample: ", force_nonempty, size)
- U = np.random.uniform(0, 1, int(size))
- return self.ppf(U, self.sample_range)
-
- def mean_area(self, volume=1.0, shape_area=1.0):
- """
- Compute mean fracture surface area from current sample range intensity.
- :param shape_area: Area of the unit fracture shape (1 for square, 'pi/4' for disc)
- :return:
- """
- sample_intensity = volume * self.range_intensity(self.sample_range)
- a, b = self.sample_range
- exp = self.power
- integral_area = (b ** (2 - exp) - a ** (2 - exp)) / (2 - exp)
- integral_intensity = (b ** (-exp) - a ** (-exp)) / -exp
- p_32 = sample_intensity / integral_intensity * integral_area * shape_area
- return p_32
-
- @staticmethod
- def intensity_for_mean_area(p_32, exp, size_range, shape_area=1.0, p32_exp=None):
- """
- Compute fracture intensity from the mean fracture surface area per unit volume.
- :param p_32: mean fracture surface area
- :param exp: power law exponent
- :param size_range: fracture size range
- :param shape_area: Area of the unit fracture shape (1 for square, 'pi/4' for disc)
- :param p32_exp: possibly different value of the power parameter for which p_32 mean area is given
- :return: p30 - fracture intensity
-
- TODO: modify to general recalculation for two different powers and introduce separate wrapper functions
- for p32 to p30, p32 to p20, etc. Need to design suitable construction methods.
- """
- if p32_exp is None:
- p32_exp = exp
- a, b = size_range
- integral_area = (b ** (2 - p32_exp) - a ** (2 - p32_exp)) / (2 - p32_exp)
- integral_intensity = (b ** (-exp) - a ** (-exp)) / -exp
- return p_32 / integral_area / shape_area * integral_intensity
-
-
-# @attr.s(auto_attribs=True)
-# class PoissonIntensity:
-# p32: float
-# # number of fractures
-# size_min: float
-# #
-# size_max:
-# def sample(self, box_min, box_max):
-
-@attrs.define
-class UniformBoxPosition:
- dimensions: List[float]
- center: List[float] = [0, 0, 0]
-
- def sample(self):
- # size = 1
- # pos = np.empty((size, 3), dtype=float)
- # for i in range(3):
- # pos[:, i] = np.random.uniform(self.center[i] - self.dimensions[i]/2, self.center[i] + self.dimensions[i]/2, size)
- pos = np.empty(3, dtype=float)
- for i in range(3):
- pos[i] = np.random.uniform(self.center[i] - self.dimensions[i] / 2, self.center[i] + self.dimensions[i] / 2,
- size=1)
- return pos
-
-
-@attrs.define
-class ConnectedPosition:
- """
- Generate a fracture positions in such way, that all fractures are connected to some of the initial surfaces.
- Sampling algorithm:
- 0. sampling position of the i-th fracture:
- 1. select random surface using theoretical frequencies of the fractures:
- f_k = N_k / (N_f - k), with N_k ~ S_k, S_k is the area of k-th surface
- ... this is done by taking a random number from (0, sum f_k) and determining 'k'
- by search in the array of cumulative frequencies (use dynarray package).
- 2. one point of the N_k points in k-th surface
- 3. center of the new fracture such, that it contains the selected point
-
- N_k is obtained as:
- 1. generate N_p * S_i points
- 2. remove points that are close to some existing points on other fractures
-
- Possible improvements:
- Instead of grouping points according to fractures, make groups of points according to some volume cells.
- This way one can obtain more uniform distribution over given volume.
- """
-
- confining_box: List[float]
- # dimensions of the confining box (center in origin)
- point_density: float
- # number of points per unit square
-
- # List of fractures, fracture is the transformation matrix (4,3) to transform from the local UVW coordinates to the global coordinates XYZ.
- # Fracture in UvW: U=(-1,1), V=(-1,1), W=0.
-
- all_points: List[np.array] = []
- # all points on surfaces
- surf_points: List[int] = []
- # len = n surfaces + 1 - start of fracture's points in all_points, last entry is number of all points
- surf_cum_freq: List[float] = []
-
- # len = n surfaces + 1 - cumulative mean frequencies for surfaces; total_freq - the last entry is surf_cum_freq
- # used for efficient sampling of the parent fracture index
-
- @classmethod
- def init_surfaces(cls, confining_box, n_fractures, point_density, points):
- """
- :param confinign_box: dimensions of axis aligned box, points out of this box are excluded.
- :param point_density: number of points per unit square
- :param points: List of 3d points on the virtual initial surface.
- :return:
- """
- np = len(points)
- freq = np / (n_fractures - 0)
- return cls(confining_box, point_density, points.copy(), [0, np], [0, freq])
-
- # TODO continue
- def sample(self, diameter, axis, angle, shape_angle):
- """
- Sample position of the fracture with given shape and orientation.
- :return:
- sampling position of the i-th fracture:
- 1. select random surface using theoretical frequencies of the fractures:
- f_k = N_k / (N_f - k), with N_k ~ S_k, S_k is the area of k-th surface
- ... this is done by taking a random number from (0, sum f_k) and determining 'k'
- by search in the array of cumulative frequencies (use dynarray package).
- 2. one point of the N_k points in k-th surface
- 3. center of the new fracture such, that it contains the selected point
-
- N_k is obtained as:
- 1. generate N_p * S_i points
- 2. remove points that are close to some existing points on other fractures
-
- """
-
- if len(self.fractures) == 0:
- self.confining_box = np.array(self.confining_box)
- # fill by box sides
- self.points = np.empty((0, 3))
- for fr_mat in self.boxes_to_fractures(self.init_boxes):
- self.add_fracture(fr_mat)
- # assert len(self.fractures) == len(self.surfaces)
-
- q = np.random.uniform(-1, 1, size=3)
- q[2] = 0
- uvq_vec = np.array([[1, 0, 0], [0, 1, 0], q])
- uvq_vec *= diameter / 2
- uvq_vec = FisherOrientation.rotate(uvq_vec, np.array([0, 0, 1]), shape_angle)
- uvq_vec = FisherOrientation.rotate(uvq_vec, axis, angle)
-
- # choose the fracture to prolongate
- i_point = np.random.randint(0, len(self.points), size=1)[0]
- center = self.points[i_point] + uvq_vec[2, :]
- self.add_fracture(self.make_fracture(center, uvq_vec[0, :], uvq_vec[1, :]))
- return center
-
- def add_fracture(self, fr_mat):
- i_fr = len(self.fractures)
- self.fractures.append(fr_mat)
- surf = np.linalg.norm(fr_mat[:, 2])
-
- points_density = 0.01
- # mean number of points per unit square meter
- points_mean_dist = 1 / np.sqrt(points_density)
- n_points = np.random.poisson(lam=surf * points_density, size=1)
- uv = np.random.uniform(-1, 1, size=(2, n_points[0]))
- fr_points = fr_mat[:, 0:2] @ uv + fr_mat[:, 3][:, None]
- fr_points = fr_points.T
- new_points = []
-
- for pt in fr_points:
- # if len(self.points) >0:
- dists_short = np.linalg.norm(self.points[:, :] - pt[None, :], axis=1) < points_mean_dist
- # else:
- # dists_short = []
- if np.any(dists_short):
- # substitute current point for a choosed close points
- i_short = np.random.choice(np.arange(len(dists_short))[dists_short])
- self.points[i_short] = pt
- # self.point_fracture = i_fr
- else:
- # add new points that are in the confining box
- if np.all((pt - self.confining_box / 2) < self.confining_box):
- new_points.append(pt)
- # self.point_fracture.append(i_fr)
- if new_points:
- self.points = np.concatenate((self.points, new_points), axis=0)
-
- @classmethod
- def boxes_to_fractures(cls, boxes):
- fractures = []
- for box in boxes:
- box = np.array(box)
- ax, ay, az, bx, by, bz = range(6)
- sides = [[ax, ay, az, bx, ay, az, ax, ay, bz],
- [ax, ay, az, ax, by, az, bx, ay, az],
- [ax, ay, az, ax, ay, bz, ax, by, az],
- [bx, by, bz, ax, by, bz, bx, by, az],
- [bx, by, bz, bx, ay, bz, ax, by, bz],
- [bx, by, bz, bx, by, az, bx, ay, bz]]
- for side in sides:
- v0 = box[side[0:3]]
- v1 = box[side[3:6]]
- v2 = box[side[6:9]]
- fractures.append(cls.make_fracture(v0, v1 / 2, v2 / 2))
- return fractures
-
- @classmethod
- def make_fracture(cls, center, u_vec, v_vec):
- """
- Construct transformation matrix from one square cornerthree square corners,
- """
- w_vec = np.cross(u_vec, v_vec)
- return np.stack((u_vec, v_vec, w_vec, center), axis=1)
-
-
-@attrs.define
-class FrFamily:
- """
- Describes a single fracture family with defined orientation and shape distributions.
- """
- name: str
- orientation: FisherOrientation
- size: PowerLawSize
- shape_angle: VonMisesOrientation
-
- @staticmethod
- def from_cfg_3d(family):
- fisher_orientation = FisherOrientation(family["trend"], family["plunge"], family["concentration"])
- size_range = (family["r_min"], family["r_max"])
- power_law_size = PowerLawSize.from_mean_area(family["power"], size_range, family["p_32"])
- assert np.isclose(family["p_32"], power_law_size.mean_area())
- shape_angle = VonMisesOrientation(trend=0, concentration=0)
- return FrFamily(family["name"], fisher_orientation, power_law_size, shape_angle)
-
- @staticmethod
- def from_cfg_2d(family):
- orientation = FisherOrientation(0, 90, np.inf)
- size_range = (family["r_min"], family["r_max"])
- power_law_size = PowerLawSize.from_mean_area(family["power"], size_range, family["p_32"])
- assert np.isclose(family["p_32"], power_law_size.mean_area())
- shape_angle = VonMisesOrientation(family["trend"], family["concentration"])
- return FrFamily(family["name"], orientation, power_law_size, shape_angle)
-
-
-
-@attrs.define
-class Population:
- """
- Data class to describe whole population of fractures, several families.
- Supports sampling across the families.
- """
- # Attributes
- domain: Tuple[float, float, float]
- # dimensions of the box domain, one dimension is 0 for 2d population
- families: List[FrFamily]
- #
- shape_class: LineShape = SquareShape
-
- @property
- def volume(self):
- return np.product([l if l>0 else 1.0 for l in self.domain])
-
- @classmethod
- def initialize_3d(cls, families: List[Dict[str, Any]], box):
- """
- Load families from a list of dict, with keywords: [ name, trend, plunge, concentration, power, r_min, r_max, p_32 ]
- Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
- """
- families = [FrFamily.from_cfg_3d(family) for family in families]
- return cls(box, families)
-
- @classmethod
- def initialize_2d(cls, families: List[Dict[str, Any]], box):
- """
- Load families from a list of dict, with keywords: [ name, trend, plunge, concentration, power, r_min, r_max, p_32 ]
- Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
- :param families json_file: JSON file with families data
- """
- families = [FrFamily.from_cfg_2d(family) for family in families]
- assert len(box) == 3 and sum((l > 0 for l in box)) == 2
- return cls(box, families)
-
- def init_from_json(self, json_file):
- """
- Load families from a JSON file. Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
- :param json_file: JSON file with families data
- """
- with open(json_file) as f:
- self.initialize(json.load(f))
-
- def init_from_yaml(self, yaml_file):
- """
- Load families from a YAML file. Assuming fixed statistical model: Fischer, Uniform, PowerLaw Poisson
- :param json_file: YAML file with families data
- """
- with open(yaml_file) as f:
- self.initialize(json.load(f))
-
-
-
-
-
- def mean_size(self):
- sizes = [family.size.mean_size(self.volume) for family in self.families]
- return sum(sizes)
-
- def set_sample_range(self, sample_range, sample_size=None):
- """
- Set sample range for fracture diameter.
- :param sample_range: (min_bound, max_bound) - one of these can be None if 'sample_size' is provided
- this bound is set to match mean number of fractures
- :param sample_size: If provided, the None bound is changed to achieve given mean number of fractures.
- If neither of the bounds is None, the lower one is reset.
- :return:
- TODO: split fracture population (stochastic model) and fracture generator with
- particular sample size and domain. Make both objects frozen.
- """
- min_size, max_size = sample_range
- for f in self.families:
- r_min, r_max = f.size.diam_range
- if min_size is not None:
- r_min = min_size
- if max_size is not None:
- r_max = max_size
- f.size.set_sample_range((r_min, r_max))
- if sample_size is not None:
- family_sizes = [family.size.mean_size(self.volume) for family in self.families]
- total_size = np.sum(family_sizes)
-
- if max_size is None:
- for f, size in zip(self.families, family_sizes):
- family_intensity = size / total_size * sample_size / self.volume
- f.size.set_upper_bound_by_intensity(family_intensity)
- else:
- for f, size in zip(self.families, family_sizes):
- family_intensity = size / total_size * sample_size / self.volume
- f.size.set_lower_bound_by_intensity(family_intensity)
-
-
- def sample(self, pos_distr=None, keep_nonempty=False) -> List[Fracture]:
- """
- Provide a single fracture set sample from the population.
- :param pos_distr: Fracture position distribution, common to all families.
- An object with method .sample(size) returning array of positions (size, 3).
- :return: List of FractureShapes.
- """
- if pos_distr is None:
- size = np.cbrt(self.volume)
- pos_distr = UniformBoxPosition([size, size, size])
-
- fractures = []
- for ifam, f in enumerate(self.families):
- name = f.name
- diams = f.size.sample(self.volume)
- fr_normals = f.orientation.sample_normal(size=len(diams))
- #fr_axis_angle = f.orientation.sample_axis_angle(size=len(diams))
- shape_angle = f.shape_angle.sample_angle(len(diams))
- #np.random.uniform(0, 2 * np.pi, len(diams))
- for r, normal, sa in zip(diams, fr_normals, shape_angle):
- #axis, angle = aa[:3], aa[3]
- center = pos_distr.sample()
- fractures.append(Fracture(self.shape_class, r, center, normal[None,:], sa, name, ifam, 1))
- return fractures
-
-
-def plotly_fractures(fr_set, fr_points):
- """
- Plot generated fractures.
- :param fr_set: List[FractureShape]
- :param fr_set: List[np.array(n, 2)] local point coordinates on fractures
- :return:
- """
- import plotly.offline as pl
- import plotly.graph_objs as go
- # import plotly.graph_objects as go
- for ifr, (fr, points) in enumerate(zip(fr_set, fr_points)):
- n_side = 5
- boundary = np.empty((4, n_side, 3))
- corners = np.array([[-0.5, -0.5, 0], [0.5, -0.5, 0], [0.5, 0.5, 0], [-0.5, 0.5, 0]])
- for s in range(4):
- start, end = corners[s, :], corners[(s + 1) % 4, :]
- boundary[s, :, :] = start[None, :] + (end - start)[None, :] * np.linspace(0, 1, n_side, endpoint=False)[:,
- None]
- boundary = boundary.reshape((-1, 3))
- boundary = fr.transform(boundary)
- points = fr.transform(points)
-
- fig = go.Figure(data=[
- go.Scatter3d(x=boundary[:, 0], y=boundary[:, 1], z=boundary[:, 2],
- marker=dict(size=1, color='blue')),
- go.Scatter3d(x=points[:, 0], y=points[:, 1], z=points[:, 2],
- marker=dict(size=1.5, color='red'))
- ])
- fig.update_layout(
- scene=dict(
- # xaxis=dict(range=[-2, 2]),
- # yaxis=dict(range=[-2, 2]),
- # zaxis=dict(range=[-1, 1]),
- aspectmode='manual',
- aspectratio=dict(x=1, y=1, z=1)
-
- ),
- )
- pl.plot(fig, filename='fractures.html')
-
-
-#
-# class FractureGenerator:
-# def __init__(self, frac_type):
-# self.frac_type = frac_type
-#
-# def generate_fractures(self, min_distance, min_radius, max_radius):
-# fractures = []
-#
-# for i in range(self.frac_type.n_fractures):
-# x = uniform(2 * min_distance, 1 - 2 * min_distance)
-# y = uniform(2 * min_distance, 1 - 2 * min_distance)
-# z = uniform(2 * min_distance, 1 - 2 * min_distance)
-#
-# tpl = TPL(self.frac_type.kappa, self.frac_type.r_min, self.frac_type.r_max, self.frac_type.r_0)
-# r = tpl.rnd_number()
-#
-# orient = Orientation(self.frac_type.trend, self.frac_type.plunge, self.frac_type.k)
-# axis, angle = orient.compute_axis_angle()
-#
-# fd = FractureData(x, y, z, r, axis[0], axis[1], axis[2], angle, i * 100)
-#
-# fractures.append(fd)
-#
-# return fractures
-#
-# def write_fractures(self, fracture_data, file_name):
-# with open(file_name, "w") as writer:
-# for d in fracture_data:
-# writer.write("%f %f %f %f %f %f %f %f %d\n" % (d.centre[0], d.centre[1], d.centre[2], d.r, d.rotation_axis[0],
-# d.rotation_axis[1], d.rotation_axis[2], d.rotation_angle, d.tag))
-#
-# def read_fractures(self, file_name):
-# data = []
-# with open(file_name, "r") as reader:
-# for l in reader.readlines():
-# x, y, z, r, axis_0, axis_1, axis_2, angle = [float(i) for i in l.split(' ')[:-1]]
-# tag = int(l.split(' ')[-1])
-# d = FractureData(x, y, z, r, axis_0, axis_1, axis_2, angle, tag)
-# data.append(d)
-#
-# return data
-#
-
-
-def unit_square_vtxs():
- return np.array([
- [-0.5, -0.5, 0],
- [0.5, -0.5, 0],
- [0.5, 0.5, 0],
- [-0.5, 0.5, 0]])
-
-
-
-
-class Fractures:
- # regularization of 2d fractures
- def __init__(self, fractures, epsilon):
- self.epsilon = epsilon
- self.fractures = fractures
- self.points = []
- self.lines = []
- self.pt_boxes = []
- self.line_boxes = []
- self.pt_bih = None
- self.line_bih = None
- self.fracture_ids = []
- # Maps line to its fracture.
-
- self.make_lines()
- self.make_bihs()
-
- def make_lines(self):
- # sort from large to small fractures
- self.fractures.sort(key=lambda fr:fr.rx, reverse=True)
- base_line = np.array([[-0.5, 0, 0], [0.5, 0, 0]])
- for i_fr, fr in enumerate(self.fractures):
- line = FisherOrientation.rotate(base_line * fr.rx, np.array([0, 0, 1]), fr.shape_angle)
- line += fr.center
- i_pt = len(self.points)
- self.points.append(line[0])
- self.points.append(line[1])
- self.lines.append((i_pt, i_pt+1))
- self.fracture_ids.append(i_fr)
-
- def get_lines(self, fr_range):
- lines = {}
- fr_min, fr_max = fr_range
- for i, (line, fr) in enumerate(zip(self.lines, self.fractures)):
- if fr_min <= fr.rx < fr_max:
- lines[i] = [self.points[p][:2] for p in line]
- return lines
-
- def make_bihs(self):
- import bih
- shift = np.array([self.epsilon, self.epsilon, 0])
- for line in self.lines:
- pt0, pt1 = self.points[line[0]], self.points[line[1]]
- b0 = [(pt0 - shift).tolist(), (pt0 + shift).tolist()]
- b1 = [(pt1 - shift).tolist(), (pt1 + shift).tolist()]
- box_pt0 = bih.AABB(b0)
- box_pt1 = bih.AABB(b1)
- line_box = bih.AABB(b0 + b1)
- self.pt_boxes.extend([box_pt0, box_pt1])
- self.line_boxes.append(line_box)
- self.pt_bih = bih.BIH()
- self.pt_bih.add_boxes(self.pt_boxes)
- self.line_bih = bih.BIH()
- self.line_bih.add_boxes(self.line_boxes)
- self.pt_bih.construct()
- self.line_bih.construct()
-
- def find_root(self, i_pt):
- i = i_pt
- while self.pt_map[i] != i:
- i = self.pt_map[i]
- root = i
- i = i_pt
- while self.pt_map[i] != i:
- j = self.pt_map[i]
- self.pt_map[i] = root
- i = j
- return root
-
- def snap_to_line(self, pt, pt0, pt1):
- v = pt1 - pt0
- v /= np.linalg.norm(v)
- t = v @ (pt - pt0)
- if 0 < t < 1:
- projected = pt0 + t * v
- if np.linalg.norm(projected - pt) < self.epsilon:
- return projected
- return pt
-
-
-
- def simplify(self):
- self.pt_map = list(range(len(self.points)))
- for i_pt, point in enumerate(self.points):
- pt = point.tolist()
- for j_pt_box in self.pt_bih.find_point(pt):
- if i_pt != j_pt_box and j_pt_box == self.pt_map[j_pt_box] and self.pt_boxes[j_pt_box].contains_point(pt):
- self.pt_map[i_pt] = self.find_root(j_pt_box)
- break
- new_lines = []
- new_fr_ids = []
- for i_ln, ln in enumerate(self.lines):
- pt0, pt1 = ln
- pt0, pt1 = self.find_root(pt0), self.find_root(pt1)
- if pt0 != pt1:
- new_lines.append((pt0, pt1))
- new_fr_ids.append(self.fracture_ids[i_ln])
- self.lines = new_lines
- self.fracture_ids = new_fr_ids
-
- for i_pt, point in enumerate(self.points):
- if self.pt_map[i_pt] == i_pt:
- pt = point.tolist()
- for j_line in self.line_bih.find_point(pt):
- line = self.lines[j_line]
- if i_pt != line[0] and i_pt != line[1] and self.line_boxes[j_line].contains_point(pt):
- pt0, pt1 = self.points[line[0]], self.points[line[1]]
- self.points[i_pt] = self.snap_to_line(point, pt0, pt1)
- break
-
- def line_fragment(self, i_ln, j_ln):
- """
- Compute intersection of the two lines and if its position is well in interior
- of both lines, benote it as the fragmen point for both lines.
- """
- pt0i, pt1i = (self.points[ipt] for ipt in self.lines[i_ln])
- pt0j, pt1j = (self.points[ipt] for ipt in self.lines[j_ln])
- A = np.stack([pt1i - pt0i, -pt1j + pt0j], axis=1)
- b = -pt0i + pt0j
- ti, tj = np.linalg.solve(A, b)
- if self.epsilon <= ti <= 1 - self.epsilon and self.epsilon <= tj <= 1 - self.epsilon:
- X = pt0i + ti * (pt1i - pt0i)
- ix = len(self.points)
- self.points.append(X)
- self._fragment_points[i_ln].append((ti, ix))
- self._fragment_points[j_ln].append((tj, ix))
-
- def fragment(self):
- """
- Fragment fracture lines, update map from new line IDs to original fracture IDs.
- :return:
- """
- new_lines = []
- new_fracture_ids = []
- self._fragment_points = [[] for l in self.lines]
- for i_ln, line in enumerate(self.lines):
- for j_ln in self.line_bih.find_box(self.line_boxes[i_ln]):
- if j_ln > i_ln:
- self.line_fragment(i_ln, j_ln)
- # i_ln line is complete, we can fragment it
- last_pt = self.lines[i_ln][0]
- fr_id = self.fracture_ids[i_ln]
- for t, ix in sorted(self._fragment_points[i_ln]):
- new_lines.append(last_pt, ix)
- new_fracture_ids.append(fr_id)
- last_pt = ix
- new_lines.append(last_pt, self.lines[i_ln][1])
- new_fracture_ids.append(fr_id)
- self.lines = new_lines
- self.fracture_ids = new_fracture_ids
-
-
-
-
-
- # def compute_transformed_shapes(self):
- # n_frac = len(self.fractures)
- #
- # unit_square = unit_square_vtxs()
- # z_axis = np.array([0, 0, 1])
- # squares = np.tile(unit_square[None, :, :], (n_frac, 1, 1))
- # center = np.empty((n_frac, 3))
- # trans_matrix = np.empty((n_frac, 3, 3))
- # for i, fr in enumerate(self.fractures):
- # vtxs = squares[i, :, :]
- # vtxs[:, 1] *= fr.aspect
- # vtxs[:, :] *= fr.r
- # vtxs = FisherOrientation.rotate(vtxs, z_axis, fr.shape_angle)
- # vtxs = FisherOrientation.rotate(vtxs, fr.rotation_axis, fr.rotation_angle)
- # vtxs += fr.centre
- # squares[i, :, :] = vtxs
- #
- # center[i, :] = fr.centre
- # u_vec = vtxs[1] - vtxs[0]
- # u_vec /= (u_vec @ u_vec)
- # v_vec = vtxs[2] - vtxs[0]
- # u_vec /= (v_vec @ v_vec)
- # w_vec = FisherOrientation.rotate(z_axis, fr.rotation_axis, fr.rotation_angle)
- # trans_matrix[i, :, 0] = u_vec
- # trans_matrix[i, :, 1] = v_vec
- # trans_matrix[i, :, 2] = w_vec
- # self.squares = squares
- # self.center = center
- # self.trans_matrix = trans_matrix
- #
- # def snap_vertices_and_edges(self):
- # n_frac = len(self.fractures)
- # epsilon = 0.05 # relaitve to the fracture
- # min_unit_fr = np.array([0 - epsilon, 0 - epsilon, 0 - epsilon])
- # max_unit_fr = np.array([1 + epsilon, 1 + epsilon, 0 + epsilon])
- # cos_limit = 1 / np.sqrt(1 + (epsilon / 2) ** 2)
- #
- # all_points = self.squares.reshape(-1, 3)
- #
- # isec_condidates = []
- # wrong_angle = np.zeros(n_frac)
- # for i, fr in enumerate(self.fractures):
- # if wrong_angle[i] > 0:
- # isec_condidates.append(None)
- # continue
- # projected = all_points - self.center[i, :][None, :]
- # projected = np.reshape(projected @ self.trans_matrix[i, :, :], (-1, 4, 3))
- #
- # # get bounding boxes in the loc system
- # min_projected = np.min(projected, axis=1) # shape (N, 3)
- # max_projected = np.max(projected, axis=1)
- # # flag fractures that are out of the box
- # flag = np.any(np.logical_or(min_projected > max_unit_fr[None, :], max_projected < min_unit_fr[None, :]),
- # axis=1)
- # flag[i] = 1 # omit self
- # candidates = np.nonzero(flag == 0)[0] # indices of fractures close to 'fr'
- # isec_condidates.append(candidates)
- # # print("fr: ", i, candidates)
- # for i_fr in candidates:
- # if i_fr > i:
- # cos_angle_of_normals = self.trans_matrix[i, :, 2] @ self.trans_matrix[i_fr, :, 2]
- # if cos_angle_of_normals > cos_limit:
- # wrong_angle[i_fr] = 1----
- # print("wrong_angle: ", i, i_fr)
- #
- # # atract vertices
- # fr = projected[i_fr]
- # flag = np.any(np.logical_or(fr > max_unit_fr[None, :], fr < min_unit_fr[None, :]), axis=1)
- # print(np.nonzero(flag == 0))
-
-
-def fr_intersect(fractures):
- """
- 1. create fracture shape vertices (rotated, translated) square
- - create vertices of the unit shape
- - use FisherOrientation.rotate
- 2. intersection of a line with plane/square
- 3. intersection of two squares:
- - length of the intersection
- - angle
- -
- :param fractures:
- :return:
- """
-
- # project all points to all fractures (getting local coordinates on the fracture system)
- # fracture system axis:
- # u_vec = vtxs[1] - vtxs[0]
- # v_vec = vtxs[2] - vtxs[0]
- # w_vec ... unit normal
- # fractures with angle that their max distance in the case of intersection
- # is not greater the 'epsilon'
-
-
-
-# class Quat:
-# """
-# Simple quaternion class as numerically more stable alternative to the Orientation methods.
-# TODO: finish, test, substitute
-# """
-#
-# def __init__(self, q):
-# self.q = q
-#
-# def __matmul__(self, other: 'Quat') -> 'Quat':
-# """
-# Composition of rotations. Quaternion multiplication.
-# """
-# w1, x1, y1, z1 = self.q
-# w2, x2, y2, z2 = other.q
-# w = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2
-# x = w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2
-# y = w1 * y2 + y1 * w2 + z1 * x2 - x1 * z2
-# z = w1 * z2 + z1 * w2 + x1 * y2 - y1 * x2
-# return Quat((w, x, y, z))
-#
-# @staticmethod
-# def from_euler(a: float, b: float, c: float) -> 'Quat':
-# """
-# X-Y-Z Euler angles to quaternion
-# :param a: angle to rotate around Z
-# :param b: angle to rotate around X
-# :param c: angle to rotate around Z
-# :return: Quaterion for composed rotation.
-# """
-# return Quat([np.cos(a / 2), 0, 0, np.sin(a / 2)]) @ \
-# Quat([np.cos(b / 2), 0, np.sin(b / 2), 0]) @ \
-# Quat([np.cos(c / 2), np.sin(c / 2), 0, 0])
-#
-# def axisangle_to_q(self, v, theta):
-# # convert rotation given by axis 'v' and angle 'theta' to quaternion representation
-# v = v / np.linalg.norm(v)
-# x, y, z = v
-# theta /= 2
-# w = np.cos(theta)
-# x = x * np.sin(theta)
-# y = y * np.sin(theta)
-# z = z * np.sin(theta)
-# return w, x, y, z
-#
-# def q_to_axisangle(self, q):
-# # convert from quaternion to rotation given by axis and angle
-# w, v = q[0], q[1:]
-# theta = np.acos(w) * 2.0
-# return v / np.linalg.norm(v), theta
diff --git a/src/bgem/stochastic/isec_conflict.py b/src/bgem/stochastic/isec_conflict.py
index f530f1f..74603a4 100644
--- a/src/bgem/stochastic/isec_conflict.py
+++ b/src/bgem/stochastic/isec_conflict.py
@@ -45,7 +45,7 @@ def solve_conflict(self,fracture_A, fracture_B,points_false_A,points_init_ind_A)
dist = []
A_vert = fracture_A.vertices[points_init_ind_A[i]]
- if points_false_A[i] != []:
+ if len(points_false_A[i]) > 0:
A_points_false = fracture_A.transform(points_false_A[i])
loc_B_points_false = fracture_B.back_transform(A_points_false)
ind_points_false = fracture_B.internal_point_2d(loc_B_points_false)
diff --git a/src/bgem/stochastic/ploting.py b/src/bgem/stochastic/ploting.py
index e5375db..ec030da 100644
--- a/src/bgem/stochastic/ploting.py
+++ b/src/bgem/stochastic/ploting.py
@@ -1,3 +1,80 @@
"""
-Various ploting for DFN inspection.
-"""
\ No newline at end of file
+Various DFN plotting and vizualization functions.
+"""
+
+
+
+def plotly_fractures(fr_set, fr_points):
+ """
+ Plot generated fractures.
+ :param fr_set: List[FractureShape]
+ :param fr_set: List[np.array(n, 2)] local point coordinates on fractures
+ :return:
+ """
+ import plotly.offline as pl
+ import plotly.graph_objs as go
+ # import plotly.graph_objects as go
+ for ifr, (fr, points) in enumerate(zip(fr_set, fr_points)):
+ n_side = 5
+ boundary = np.empty((4, n_side, 3))
+ corners = np.array([[-0.5, -0.5, 0], [0.5, -0.5, 0], [0.5, 0.5, 0], [-0.5, 0.5, 0]])
+ for s in range(4):
+ start, end = corners[s, :], corners[(s + 1) % 4, :]
+ boundary[s, :, :] = start[None, :] + (end - start)[None, :] * np.linspace(0, 1, n_side, endpoint=False)[:,
+ None]
+ boundary = boundary.reshape((-1, 3))
+ boundary = fr.transform(boundary)
+ points = fr.transform(points)
+
+ fig = go.Figure(data=[
+ go.Scatter3d(x=boundary[:, 0], y=boundary[:, 1], z=boundary[:, 2],
+ marker=dict(size=1, color='blue')),
+ go.Scatter3d(x=points[:, 0], y=points[:, 1], z=points[:, 2],
+ marker=dict(size=1.5, color='red'))
+ ])
+ fig.update_layout(
+ scene=dict(
+ # xaxis=dict(range=[-2, 2]),
+ # yaxis=dict(range=[-2, 2]),
+ # zaxis=dict(range=[-1, 1]),
+ aspectmode='manual',
+ aspectratio=dict(x=1, y=1, z=1)
+
+ ),
+ )
+ pl.plot(fig, filename='fractures.html')
+
+
+
+
+def plot_fr_orientation(fractures):
+ family_dict = collections.defaultdict(list)
+ for fr in fractures:
+ x, y, z = \
+ fracture.FisherOrientation.rotate(np.array([0, 0, 1]), axis=fr.rotation_axis, angle=fr.rotation_angle)[0]
+ family_dict[fr.region].append([
+ to_polar(z, y, x),
+ to_polar(z, x, -y),
+ to_polar(y, x, z)
+ ])
+
+ import matplotlib.pyplot as plt
+ fig, axes = plt.subplots(1, 3, subplot_kw=dict(projection='polar'))
+ for name, data in family_dict.items():
+ # data shape = (N, 3, 2)
+ data = np.array(data)
+ for i, ax in enumerate(axes):
+ phi = data[:, i, 0]
+ r = data[:, i, 1]
+ c = ax.scatter(phi, r, cmap='hsv', alpha=0.75, label=name)
+ axes[0].set_title("X-view, Z-north")
+ axes[1].set_title("Y-view, Z-north")
+ axes[2].set_title("Z-view, Y-north")
+ for ax in axes:
+ ax.set_theta_zero_location("N")
+ ax.set_theta_direction(-1)
+ ax.set_ylim(0, 1)
+ fig.legend(loc=1)
+ fig.savefig("fracture_orientation.pdf")
+ plt.close(fig)
+ # plt.show()
diff --git a/src/bgem/transform.py b/src/bgem/transform.py
index 8f14f26..246dee3 100644
--- a/src/bgem/transform.py
+++ b/src/bgem/transform.py
@@ -2,6 +2,7 @@
Linear transformation in 3d space.
"""
import copy
+import numbers
from typing import *
import numpy as np
from bgem import ParamError
@@ -21,7 +22,7 @@ def check_matrix(mat, shape, values, idx=()):
try:
if len(shape) == 0:
- if not isinstance(mat, values):
+ if not issubclass(type(mat), values):
raise ParamError("Element at index {} of type {}, expected instance of {}.".format(idx, type(mat), values))
else:
@@ -81,12 +82,18 @@ def __init__(self, matrix: Matrix = None):
Constructor for elementary afine transformation.
:param matrix: Transformation matrix 3x4. First three columns forms the linear transformation matrix.
Last column is the translation vector.
+ The full affine transform matrix is available through the full_affine_matrix property.
+ TODO: allow passing the full affine transfrom
"""
self._composition = []
if matrix is None:
self._matrix = None
else:
- check_matrix(matrix, [3, 4], (int, float))
+ matrix = np.array(matrix)
+ if matrix.shape == (4, 4):
+ assert np.allclose(matrix[3], [0, 0, 0, 1])
+ matrix = matrix[:3]
+ check_matrix(matrix, [3, 4], (numbers.Real,))
self._matrix = np.array(matrix, dtype=float)
def is_composed(self) -> bool :
@@ -105,6 +112,9 @@ def matrix(self):
else:
return self._matrix
+ @property
+ def affine_matrix(self):
+ return np.concatenate((self._matrix, np.array([[0,0,0,1]])))
def __call__(self, points:np.array) -> np.array:
"""
@@ -175,18 +185,19 @@ def rotate(self, axis, angle, center=(0, 0, 0)):
rotate, and then shift back.
"""
matrix = Transform._identity_matrix()
- center = np.array(center, dtype=float)
- axis = np.array(axis, dtype=float)
- axis /= np.linalg.norm(axis)
-
- W = np.array(
- [[0, -axis[2], axis[1]],
- [axis[2], 0, -axis[0]],
- [-axis[1], axis[0], 0]])
- M = np.eye(3) + np.sin(angle) * W + 2 * np.sin(angle/2) ** 2 * W @ W
- matrix[:, 3] -= center
- matrix = M @ matrix
- matrix[:, 3] += center
+ if angle != 0.0:
+ center = np.array(center, dtype=float)
+ axis = np.array(axis, dtype=float)
+ axis /= np.linalg.norm(axis)
+
+ W = np.array(
+ [[0, -axis[2], axis[1]],
+ [axis[2], 0, -axis[0]],
+ [-axis[1], axis[0], 0]])
+ M = np.eye(3) + np.sin(angle) * W + 2 * np.sin(angle/2) ** 2 * W @ W
+ matrix[:, 3] -= center
+ matrix = M @ matrix
+ matrix[:, 3] += center
return Transform(matrix) @ self
def scale(self, scale_vector, center=(0, 0, 0)):
diff --git a/src/bgem/upscale/__init__.py b/src/bgem/upscale/__init__.py
new file mode 100644
index 0000000..3fd3cad
--- /dev/null
+++ b/src/bgem/upscale/__init__.py
@@ -0,0 +1,4 @@
+from .fem import Fe, flat_dim, tensor_dim, Grid, FEM, upscale
+from .fields import voigt_to_tn, tn_to_voigt
+from .voxelize import FracturedDomain, Intersection, FracturedMedia, \
+ intersection_decovalex, intersection_cell_corners, fr_conductivity
\ No newline at end of file
diff --git a/src/bgem/upscale/fem.py b/src/bgem/upscale/fem.py
new file mode 100644
index 0000000..6b995fd
--- /dev/null
+++ b/src/bgem/upscale/fem.py
@@ -0,0 +1,689 @@
+"""
+Exact FEM based homogenization using regular d-dimansional grid.
+"""
+from functools import cached_property
+import numpy as np
+from .fields import tn_to_voigt, voigt_to_tn, voigt_coords
+from .homogenization import equivalent_posdef_tensor
+#from bgem.stochastic import dfn
+import scipy.sparse as sp
+import pyamg
+
+def Q1_1d_basis(points):
+ """
+ Coeeficients of 1D Q1 basis.
+
+ return a_ij matrix
+ where i row is i-th basis function with coefficients:
+ p_i(x) = a_i0 + a_i1 * x + a_i2 * x**2 + ...
+ """
+ order = len(points)
+ res = np.empty((order, order))
+ prod = np.ones(order)
+ for i in range(order):
+ res[i] = prod
+ prod = prod * points
+ monomial_at_point = res
+ return np.linalg.inv(monomial_at_point)
+
+def poly_diff_1d(poly_functions):
+ """
+ poly_functions (n, m) shape array with m coefficients for n functions
+ Returns derivatives of the functions, shape: (n, m-1)
+ """
+ order = poly_functions.shape[1]
+ return poly_functions[:, 1:] * np.arange(1, order)
+
+
+
+def eval_1d(poly_functions, x):
+ """
+ Evaluate polynomials `poly_functions`, shape (n, order) for
+ vector of values `x`.
+ return shape: (n, len(x))
+ """
+ #x = np.atleast_1d(x)
+ # print((order, *x.shape))
+ x = np.array(x)
+ abs_coef = poly_functions[:, np.full_like(x, -1, dtype=np.int64)]
+ # broadcast abs term to the result shape
+ res = abs_coef
+ for coef in poly_functions.T[-2::-1]:
+ res = res * x[None, :] + coef[:, None]
+ return res
+
+
+# evaluation of tensor product basis functions
+#
+def flat_dim(x, dim):
+ """
+ Flatten dimension related axes, i.e. first $d$ axes of the $x$ array.
+ x shape (n_1, ... n_d, other) -> (n_1 * .. * n_d, other)
+ """
+ return x.reshape((-1, *x.shape[dim:]))
+
+
+def tensor_dim(x, dim, order):
+ """
+ x shape (order**dim, other) -> (order, ... , order, other)
+
+ """
+ assert x.shape[0] == order ** dim
+ new_shape = (*(dim * [order]), *x.shape[1:])
+ return x.reshape(new_shape)
+
+
+def outer_product_along_first_axis(arrays):
+ """
+ arrays: list of `k` arrays a_i with shape [n_i, m]
+ :param arrays:
+ :return: res with shape [n_1, ... n_k, m]
+ """
+ _, n = arrays[0].shape
+ result = arrays[0]
+ for arr in arrays[1:] :
+ assert arr.shape[1] == n
+ result = result[..., np.newaxis, :] * arr
+ return result
+
+
+class Fe:
+ """
+ Tensor product basis.
+ """
+
+ @classmethod
+ def Q(cls, dim, order=1):
+ order = order + 1
+ points = np.linspace(0, 1, order)
+ basis = Q1_1d_basis(points)
+ return cls(dim, basis)
+
+ def __init__(self, dim, basis_1d):
+ """
+ """
+ n, m = basis_1d.shape
+ assert n == m
+ self.n_dofs_1d = n
+ self.dim = dim
+ self.basis = basis_1d
+ self.diff_basis = poly_diff_1d(basis_1d)
+
+ @property
+ def n_dofs(self):
+ return self.n_dofs_1d ** self.dim
+
+ def eval(self, points):
+ """
+ Evaluate all tensor product basis functions at given points.
+ """
+ dim, n_points = points.shape
+ assert dim == self.dim
+ #print(self.basis)
+ #print(points.ravel())
+ dim_basis_values = eval_1d(self.basis, points.ravel()).reshape(-1, self.dim, n_points)
+ # shape (order, dim , n_points))
+ tensor_values = outer_product_along_first_axis(dim_basis_values.transpose([1, 0, 2]))
+ # shape (order, ... order, n_points)
+ return flat_dim(tensor_values, self.dim)
+
+ def grad_eval(self, points):
+ """
+ points: (dim, n_points)
+ Evaluate gradients of all tensor product basis functions at given points.
+ return: shape (dim, n_basis_fn, n_points)
+ """
+ dim, n_points = points.shape
+ assert dim == self.dim
+ dim_basis_values = eval_1d(self.basis, points.ravel()).reshape(-1, self.dim, n_points)
+ # shape (order, dim , n_points))
+ diff_vals = eval_1d(self.diff_basis, points.ravel())
+ dim_diff_values = diff_vals.reshape(-1, self.dim, n_points)
+ # shape (order, dim , n_points))
+
+ result = []
+ for i_dim in range(dim):
+ diff_product_basis_list = [
+ dim_diff_values[:, j_dim, :] if j_dim == i_dim else dim_basis_values[:, j_dim, :]
+ for j_dim in range(dim)]
+ prod_basis = outer_product_along_first_axis(diff_product_basis_list)
+ result.append(flat_dim(prod_basis, self.dim))
+ result = np.stack(result)
+ # print(result.shape)
+ return result
+
+ def ref_el_dofs(self):
+ """
+ Positions of the DOFs on the reference element.
+ ref_el_dofs[:, i] .. position of the i-th dofs
+ :return: ndarray shape (dim, n_dofs)
+ """
+ n = self.n_dofs_1d
+ grid_slice = tuple(self.dim * [slice(0, n)])
+ return np.mgrid[grid_slice].reshape(self.dim, -1)
+
+ def __repr__(self):
+ return f"Q1(d={self.dim}, order={self.n_dofs_1d - 1})"
+
+
+
+class Grid:
+ """
+ Regular anisotropic grid for a box in D dimensional ambient space.
+ The cells are linearly numbered as np.empty(nx,ny,nz).flatten
+ Cells numbered as C-style numpy array, last dimension running the fastest.
+ """
+ @classmethod
+ def from_step(cls, dimensions, step, origin=0):
+ shape = np.ceil(dimensions / step).astype(int)
+ return cls(dimensions, shape, origin=origin)
+
+ def __init__(self, dimensions, n_steps, origin=0):
+ """
+ dimensions: sequence of the physical dimensions of the grid box
+ n_steps: sequence, number of cells in each axis, e.g. (nx, ny, nz)
+ or just int for same number of cells in each axis (n,n,n)
+ origin: position of the grid node zero
+ """
+ self.dimensions = dimensions
+ # Array with physital dimensions of the homogenization domain.
+ self.dim = len(dimensions)
+ # Dimension of the ambient space, e.g. 1d, 2d, 3d.
+ self.shape = n_steps * np.ones(self.dim, dtype=np.int64)
+ # Int Array with number of cells in each axis, i.e. shape of the grid
+ self.origin = origin * np.ones(self.dim)
+ # Absolute position of the node zero.
+
+ @property
+ def __len__(self):
+ """
+ Number of cells
+ We undrestood the grid as a collection of cells.
+ :return:
+ """
+ return np.prod(self.shape)
+
+ @property
+ def n_elements(self):
+ return np.prod(self.shape)
+
+ @cached_property
+ def step(self):
+ # Array with step size in each axis.
+ return self.dimensions / self.shape
+
+ def grid_center(self):
+ """
+ Return cooridinates of the barycenter of the whole grid.
+ Result: np.array, shape = (3,)
+ """
+ return (2*self.origin + self.dimensions) / 2
+
+ def barycenters(self):
+ """
+ Barycenters of elements.
+ n_els = prod( n_steps )
+ :return: shape (n_els, dim)
+ """
+ bary_axes = [self.step[i] * (np.arange(self.shape[i]) + 0.5) for i in range(self.dim)]
+ mesh_grid = np.meshgrid(*bary_axes, indexing='ij')
+ mesh_grid_array = np.stack(mesh_grid, axis=-1)
+ return mesh_grid_array.reshape(-1, self.dim) + self.origin
+
+
+ def __repr__(self):
+ msg = f"Grid({self.shape}, domain: {self.dimensions}@{self.origin})"
+ return msg
+
+ def points_to_cell_coords(self, points):
+ """
+ For a point array of shape (N, 3) return
+ array (N,3) of cell indices containing the points.
+ Indices set to -1 if point is out of the grid.
+ :param points:
+ :return:
+ """
+ centers_ijk_grid = (points - self.origin) // self.step[None, :]
+ centers_ijk_grid = centers_ijk_grid.astype(np.int32)
+ out_of_grid = np.logical_or( centers_ijk_grid < 0, centers_ijk_grid >=self.shape[None,:])
+ centers_ijk_grid[out_of_grid, :] = -1
+ return centers_ijk_grid
+
+ def coord_aabb(self, aabb):
+ i_aabb = (aabb - self.origin) // self.step[None, :]
+ i_aabb = i_aabb.astype(np.int32)
+ i_aabb = np.maximum(i_aabb, 0)
+ i_aabb = np.minimum(i_aabb, self.shape-1)
+ return i_aabb
+
+ def project_points(self, points):
+ """
+ :param points: array of shape (N, dim)
+ :return: For each point the index of the containing grid cell.
+ """
+ #grid_min_corner = -grid.dimensions / 2
+ centers_ijk_grid = (points - self.origin) // self.step[None, :]
+ centers_ijk_grid = centers_ijk_grid.astype(np.int32)
+ assert np.alltrue(centers_ijk_grid < self.shape[None, :])
+ grid_cell_idx = centers_ijk_grid[:, 0] + self.shape[0] * (
+ centers_ijk_grid[:, 1] + self.shape[1] * centers_ijk_grid[:, 2])
+ return grid_cell_idx
+
+ def cell_box(self, min_cell, max_cell):
+ """For given cell coords return array of cell ids in the box"""
+ pass
+
+ def axes_linspace(self):
+ """
+ Return list of linspaces, on for each axis.
+ :return:
+ """
+ return [
+ np.linspace(self.origin[ax], self.origin[ax] + self.dimensions[ax], self.shape[ax] + 1, dtype=np.float32)
+ for ax in range(self.dim)
+ ]
+
+ def axes_cell_coords(self):
+ """
+ Return list of self.dim arrays specifying positions of the cell barycenters
+ in each axis.
+ :return:
+ """
+ return [
+ np.arange(self.origin[ax] + self.step[ax]/2, self.origin[ax] + self.dimensions[ax], self.step[ax], dtype=np.float32)
+ for ax in range(self.dim)
+ ]
+
+ def cell_field_C_like(self, cell_array_F_like):
+ """
+ :param cell_array: shape (n_elements, *value_dim) in F-like numbering
+ :return: Same values rearranged for a C-like indexing, Z index running the fastest
+ ... used in self
+ """
+ value_shape = cell_array_F_like.shape[1:]
+ grid_field = cell_array_F_like.reshape(*reversed(self.shape), *value_shape)
+ transposed = grid_field.transpose(*reversed(range(self.dim)))
+ return transposed.reshape(-1, *value_shape)
+
+ def cell_field_F_like(self, cell_array_C_like):
+ """
+ :param cell_array: shape (n_elements, *value_dim) in C-like numbering
+ :return: Same values rearranged for a F-like indexing, X index running the fastest
+ ... used in PyVista.
+ """
+ value_shape = cell_array_C_like.shape[1:]
+ grid_field = cell_array_C_like.reshape(*self.shape, *value_shape)
+ transposed = grid_field.transpose(*reversed(range(self.dim)),-1)
+ return transposed.reshape(-1, *value_shape)
+
+
+def fem_grid(dimensions, shape, fe, origin=None):
+ """
+ More compact construction of the FEM problem.
+ :param dimensions:
+ :param shape:
+ :param fe:
+ :param origin:
+ :return:
+
+ """
+ dimensions = dimensions = dimensions * np.ones(fe.dim)
+ if origin is None:
+ origin = 0
+ grid = Grid(dimensions, shape, origin)
+ return FEM(grid, fe)
+
+class FEM:
+ def __init__(self, grid, fe):
+ self.grid = grid
+ # The grid of finite elements.
+ self.fe = fe
+ # Tensor product finite element class.
+ assert self.grid.dim == self.fe.dim
+
+ self.n_bc_dofs = 0
+ # Number of bounday DOFs, first part of the calculation numbering of DOFs.
+ self.natur_map = None
+ # gives natural dof index for given calculation dof index
+ # natural numbering comes from flattened (ix, iy, iz) dof coordinates
+ # calculation numbering puts Dirichlet DOFs at the begining
+ self.el_dofs = None
+ # shape (n_elements, n_local_dofs), DOF indices in calculation numbering
+
+ self.make_numbering(self.grid.dim)
+
+ def make_numbering(self, dim):
+ # grid of integers, set to (-1)
+ # go through boundary, enumerate, skip filled values
+ # go through internal nodes, enumerate remaining
+ # reshape -> computation_from_natural
+ assert self.dofs_shape.shape == (dim,)
+ n_dofs = np.prod(self.dofs_shape)
+ # mark boundary dofs -1, interior dofs -2
+ calc_map = np.full(self.dofs_shape, -1, dtype=np.int64)
+ interior_slice = tuple(dim * [slice(1, -1)])
+ calc_map[interior_slice] = -2
+
+ # construct new numbering of dofs
+ el_indices = np.where(calc_map == -1)
+ self.n_bc_dofs = len(el_indices[0])
+ # print(self.n_bc_dofs, indices)
+ calc_map[el_indices] = np.arange(0, self.n_bc_dofs, dtype=np.int64)
+ el_indices = np.where(calc_map == -2)
+ calc_map[el_indices] = np.arange(self.n_bc_dofs, n_dofs, dtype=np.int64)
+ calc_map = calc_map.flatten()
+ self.natur_map = np.empty(len(calc_map), dtype=np.int64)
+ self.natur_map[calc_map[:]] = np.arange(len(calc_map), dtype=np.int64)
+ assert len(self.natur_map) == self.n_dofs
+
+ # create element dofs mapping in natural dofs numbering
+ ref_dofs = self.fe.ref_el_dofs() # shape (dim, n_local_dofs)
+ assert ref_dofs.shape == (dim, self.fe.n_dofs_1d ** dim)
+
+ #print(ax.shape, ref_dofs.shape)
+ # Dof indices on the first cell.
+ cell_0_dofs = (self.dof_coord_coef[None, :] @ ref_dofs).ravel()
+ #print(ref_dofs.shape)
+
+ # Creating a meshgrid for each dimension
+ el_indices = np.meshgrid(*[np.arange(n) for n in self.grid.shape], indexing='ij')
+
+ # Calculating the tensor values based on the formula and axes
+ el_dofs = np.zeros(self.grid.shape, dtype=np.int64)
+ o = self.fe.n_dofs_1d - 1
+ for d in range(dim):
+ el_dofs += (self.dof_coord_coef[d] * o ** (d + 1)) * el_indices[d]
+ #print(el_dofs)
+ el_dofs = el_dofs[..., None] + cell_0_dofs[None, :] # shape: nx, nY, nz, loc_dofs
+ self.el_dofs = calc_map[el_dofs.reshape(-1, el_dofs.shape[-1])]
+ assert self.el_dofs.shape == (self.grid.n_elements, self.fe.n_dofs)
+
+
+ @property
+ def n_loc_dofs(self):
+ return self.fe.n_dofs
+
+ @property
+ def dofs_shape(self):
+ """
+ Shape of DOFs grid.
+ :return:
+ """
+ return self.grid.shape * (np.array(self.fe.n_dofs_1d) - 1) + 1
+
+ @property
+ def n_dofs(self):
+ return np.prod(self.dofs_shape)
+
+ # @property
+ # def ax_dofs(self):
+ # """
+ # Number of dofs in each axis.
+ # :return:
+ # """
+ # return self.n_steps * (self.fe.n_dofs_1d - 1) + 1 # shape (dim, )
+
+ @property
+ def dof_coord_coef(self):
+ # Array for computiong global dof index from dof int coords.
+ #
+ # idx = sum(coord * coord_coef)
+ # 1D: [1]
+ # 2D: [ny, 1]
+ # 3D: [ny*nz, nz, 1]
+ return np.cumprod([1, *self.dofs_shape[:0:-1]])[::-1]
+
+ def get_dof_grid(self):
+ return Grid(self.grid.dimensions,
+ self.grid.step / (self.fe.n_dofs_1d - 1),
+ self.origin)
+
+ def nodes(self):
+ """
+ Nodes of the grid.
+ n_nodes = prod( n_steps + 1 )
+ :return: shape (n_nodes, dim)
+ """
+ node_coords = self.dof_idx_to_coord(np.arange(self.n_dofs, dtype=np.int64))
+ return node_coords * self.grid.step[None, :] + self.grid.origin[None, :] #mesh_grid_array.reshape(-1, self.dim) + self.origin
+
+ @cached_property
+ def bc_coords(self):
+ """
+ ?? todo transpose, refactor
+ :return:
+ """
+ bc_natur_indeces = self.natur_map[np.arange(self.n_bc_dofs, dtype=np.int64)]
+ return self.dof_idx_to_coord(bc_natur_indeces)
+
+ @cached_property
+ def bc_points(self):
+ """
+ todo refactor
+ :return:
+ """
+ return self.bc_coords * self.grid.step[None, :] + self.grid.origin[None, :]
+
+
+ def dof_idx_to_coord(self, dof_natur_indices):
+ """
+ Produce index coordinates (ix,iy,iz) for given natural dof indices.
+ :param dof_natur_indices: np.int64 array, shape (n_dofs,)
+ :return: integer coordinates: (len(dof_natur_indeces), self.dim)
+ """
+ indices = dof_natur_indices
+ coords = np.empty((*dof_natur_indices.shape, self.grid.dim), dtype=np.int64)
+ for i in range(self.grid.dim-1, 0, -1):
+ indices, coords[:, i] = np.divmod(indices, self.dofs_shape[i])
+ #indices, coords[:, i] = np.divmod(indices, self.dof_to_coord[i])
+ coords[:, 0] = indices
+ return coords
+
+ def __repr__(self):
+ msg = f"FEM({self.fe} {self.grid})"
+ return msg
+
+
+ @cached_property
+ def laplace(self):
+ """
+ Return matrix M. Shape (n_voigt, n_loc_dofs * n_loc_dofs).
+
+ This should be used to assmebly the local matrices like:
+ A_loc = M[None, :, :] @ K[:, None, :]
+ where A_loc is array of local matrices (n_elements, n_loc_dofs**2)
+ and K is (n_elements, K_tn_size), where K_tn_size is just upper triangle values
+ i.e. 1, 3, 6 for dim=1, 2, 3.
+ """
+ # we integrate square of gradients, which is poly of degree 2*(deg -1) = 2deg - 2
+ # Gaussian quadrature integrates exactly degree 2*deg -1
+ deg = 2* (self.fe.n_dofs_1d - 1) # 2 * degeree of base function polynomials
+
+ # points and wights on [0, 1] interval
+ points, weights = np.polynomial.legendre.leggauss(deg)
+ points = 0.5 * (points + 1.0)
+ weights = 0.5 * weights
+
+ msh_params = self.grid.dim * [points]
+ points_tn = np.stack(np.meshgrid(*msh_params)).reshape((self.grid.dim, -1))
+ outer_params = [jac * weights[:, None] for jac in self.grid.step]
+ weights_tn = outer_product_along_first_axis(outer_params).ravel()
+ grad = self.fe.grad_eval(points_tn) # shape: (dim, n_loc_dofs, n_quads)
+ # dim, n_loc_dofs, n_quad = grad.shape
+ weight_grad = weights_tn[None, None :] * grad[:, :, :] # (dim, n_loc_dofs, n_quads)
+ full_tn_laplace = grad[:, None, :, :] @ weight_grad[None, :, :, :].transpose(0, 1, 3, 2)
+
+ M = [
+ full_tn_laplace[i, j, :, :]
+ if i==j else
+ full_tn_laplace[i, j, :, :] + full_tn_laplace[j, i, :, :]
+ for i, j in voigt_coords[self.grid.dim]
+ ]
+ # M shape [n_voight, n_loc_dofs, n_loc_dofs]
+ # return np.reshape(M, (len(M), -1)).T
+ M = np.stack(M)
+ assert M.shape == (len(voigt_coords[self.grid.dim]), self.fe.n_dofs, self.fe.n_dofs)
+ return M.reshape((M.shape[0], -1))
+
+ @cached_property
+ def loc_mat_ij(self):
+ """
+ returns: rows, cols
+ both with shape: (loc_dofs, loc_dofs, n_elements)
+ Provides rows and cols for the local matrices.
+ """
+ n_elements, n_loc_dofs = self.el_dofs.shape
+ rows = np.tile(self.el_dofs[:, :, None], [1, 1, n_loc_dofs])
+ cols = np.tile(self.el_dofs[:, None, :], [1, n_loc_dofs, 1])
+ return rows, cols
+
+ def _loc_matrices(self, K_voight_tensors):
+ """
+ K_voight_tensors, shape: (n_elements, n_voight)
+ """
+ assert K_voight_tensors.shape == (self.grid.n_elements, len(voigt_coords[self.grid.dim]))
+ laplace = self.laplace
+ # n_voight, locmat_dofs == laplace.shape
+ # Use transpositions of intputs and output in order to enforce cache efficient storage.
+ #loc_matrices = np.zeros((self.n_loc_dofs self.n_loc_dofs, self.n_elements))
+ #np.matmul(.T[:, None, :], laplace[None, :, :], out=loc_matrices.reshape(-1, self.n_elements).T)
+ loc_matrices = K_voight_tensors[:, None, :] @ laplace[None, :, :]
+ return loc_matrices.reshape((self.grid.n_elements, self.fe.n_dofs, self.fe.n_dofs))
+
+ def assembly_dense(self, K_voight_tensors):
+ """
+ K_voight_tensors, shape: (n_elements, n_voight)
+ """
+ loc_matrices = self._loc_matrices(K_voight_tensors)
+ A = np.zeros((self.n_dofs, self.n_dofs))
+ # Use advanced indexing to add local matrices to the global matrix
+ np.add.at(A, self.loc_mat_ij, loc_matrices)
+ return A
+
+ def assembly_csr(self, K_voight_tensors):
+ """
+ K_voight_tensors, shape: (n_elements, n_voight)
+ """
+ rows, cols = self.loc_mat_ij
+ values = self._loc_matrices(K_voight_tensors)
+ rows, cols, values = map(np.ravel, [rows, cols, values])
+
+ # Drop boundary rows.
+ bulk_rows = rows >= self.n_bc_dofs
+ rows, cols, values = [array[bulk_rows] for array in (rows, cols, values)]
+ rows = rows - self.n_bc_dofs
+
+ # Split boundary and bulk rows
+ # See also np.split for possible faster implementation
+ n_dofs = self.n_dofs - self.n_bc_dofs
+ def sub_mat(rows, cols, vals, condition, n_cols, idx_shift):
+ sub_cols = cols[condition] - idx_shift
+ return sp.csr_matrix((vals[condition], (rows[condition], sub_cols)), shape=(n_dofs, n_cols))
+
+ bulk_cols = (cols >= self.n_bc_dofs)
+ A_bulk = sub_mat(rows, cols, values, bulk_cols, self.n_dofs - self.n_bc_dofs, self.n_bc_dofs)
+ A_bc = sub_mat(rows, cols, values, ~bulk_cols, self.n_bc_dofs, 0)
+ return A_bulk, A_bc
+
+ def solve_direct(self, K, p_grad_bc):
+ """
+ :param K: array, shape: (n_elements, n_voight)
+ K = array of shape (*self.shape, n_voight).reshape(-1, n_voight)
+ cell at position (iX, iY, iZ) has index
+ (iX * self.shape[1] + iY) * self.shape[2] + iZ
+ i.e. the Z index is running fastest,
+ :param p_grad_bc: array, shape: (n_vectors, dim)
+ usually n_vectors >= dim
+ :return: pressure, shape: (n_vectors, n_dofs)
+ """
+ n_rhs, d = p_grad_bc.shape
+ assert d == self.grid.dim
+ A = self.assembly_dense(K)
+ bc_points_rel = self.bc_coords * self.grid.step[None, :]
+ pressure_bc = p_grad_bc @ bc_points_rel.T # (n_vectors, n_bc_dofs)
+ B = pressure_bc @ A[:self.n_bc_dofs, self.n_bc_dofs:] # (n_vectors, n_interior_dofs)
+ pressure = np.empty((n_rhs, self.n_dofs))
+ pressure[:, :self.n_bc_dofs] = pressure_bc
+ pressure[:, self.n_bc_dofs:] = np.linalg.solve(A[self.n_bc_dofs:, self.n_bc_dofs:], -B.T).T
+ pressure_natur = np.empty_like(pressure)
+ pressure_natur[:, self.natur_map[:]] = pressure[:, :]
+ return pressure_natur.reshape((n_rhs, -1))
+
+
+ def solve_sparse(self, K, p_grad_bc):
+ """
+ :param K: array, shape: (n_elements, n_voight)
+ K = array of shape (*self.shape, n_voight).reshape(-1, n_voight)
+ cell at position (iX, iY, iZ) has index
+ (iX * self.shape[1] + iY) * self.shape[2] + iZ
+ i.e. the Z index is running fastest,
+ :param p_grad_bc: array, shape: (n_vectors, dim)
+ usually n_vectors >= dim
+ :return: pressure, shape: (n_vectors, n_dofs)
+ """
+ n_rhs, d = p_grad_bc.shape
+ assert d == self.grid.dim
+ A_bulk, A_bc = self.assembly_csr(K)
+ bc_points_rel = self.bc_coords * self.grid.step[None, :]
+ pressure_bc = p_grad_bc @ bc_points_rel.T # (n_vectors, n_bc_dofs)
+ B = (A_bc @ pressure_bc.T).T # (n_vectors, n_interior_dofs)
+
+ #pyamg.ruge_stuben_solver(A_bulk)
+ solver = pyamg.smoothed_aggregation_solver(A_bulk, symmetry='symmetric')
+ pressure = np.zeros((n_rhs, self.n_dofs))
+ pressure[:, :self.n_bc_dofs] = pressure_bc
+ for pressure_comp, b in zip(pressure, B):
+ pressure_comp[self.n_bc_dofs:] = solver.solve(-b)
+ #pressure[:, ] = np.linalg.solve(A[self.n_bc_dofs:, self.n_bc_dofs:], -B.T).T
+ pressure_natur = np.empty_like(pressure)
+ pressure_natur[:, self.natur_map[:]] = pressure[:, :]
+ return pressure_natur.reshape((n_rhs, -1))
+
+ def field_grad(self, dof_vals):
+ """
+ Compute solution gradient in element barycenters.
+ :param dof_vals: (n_vec, n_dofs)
+ :return: (n_vec, n_el, dim)
+ """
+ el_dof_vals = dof_vals[:, self.natur_map[self.el_dofs[:, :]]] # (n_vec, n_el, n_loc_dofs)
+ quads = np.full((self.grid.dim, 1), 0.5) # Zero order Gaussian quad. Integrates up to deg = 1.
+ grad_basis = self.fe.grad_eval(quads) # (dim, n_loc_dofs, 1)
+ grad_els = grad_basis[None,None,:, :,0] @ el_dof_vals[:,:, :, None]
+ return grad_els[:, :,:,0]
+
+
+def upscale(K, domain=None):
+ """
+
+ :param K: array (nx, ny, nz, n_voigt) or similar for dim=1, 2
+ :param domain: domain size array, default np.ones(dim)
+ :return: Effective tensor.
+ """
+ dim = len(K.shape) - 1
+ if domain is None:
+ domain = np.ones(dim)
+
+ order = 1
+ fem = FEM(Grid(domain, K.shape[:-1]), Fe.Q(dim, order))
+ p_grads = np.eye(dim)
+ K_els = K.reshape((fem.grid.n_elements, -1))
+ pressure = fem.solve_direct(K_els, p_grads)
+ #xy_grid = [np.linspace(0, g.size[i], g.ax_dofs[i]) for i in range(2)]
+ #fem_plot.plot_pressure_fields(*xy_grid, pressure)
+ #pressure_flat = pressure.reshape((len(p_grads), -1))
+ grad = fem.field_grad(pressure) # (n_vectors, n_els, dim)
+ loads = np.average(grad, axis=1) # (n_vectors, dim)
+ full_K_els = voigt_to_tn(K_els)
+ responses_els = grad[:, :, None, :] @ full_K_els[None, :, :, :] #(n_vec, n_els, 1, dim)
+ responses = np.average(responses_els[:, :, 0, :], axis=1)
+ return equivalent_posdef_tensor(loads, responses)
+
+
+# def rasterize_dfn(fractures: dfn.FractureSet, step):
+# """
+# Rasterize given fracture to the grid with `step`
+# :param fractures:
+# :param step:
+# :return:
+# """
+# pass
\ No newline at end of file
diff --git a/src/bgem/upscale/fem_plot.py b/src/bgem/upscale/fem_plot.py
new file mode 100644
index 0000000..a2c3bc3
--- /dev/null
+++ b/src/bgem/upscale/fem_plot.py
@@ -0,0 +1,126 @@
+import pyvista as pv
+import numpy as np
+import pathlib
+from bgem.upscale import Grid
+import matplotlib.pyplot as plt
+import numpy as np
+
+"""
+Custom plotting function, mainly for debugging and test purpose.
+- VTK output of cell and vector fields on a structured grid
+- PyVista plot of a given cell / point field
+- 3d Scatter glyph visualization of a vector/tensor field
+"""
+
+
+
+def grid_fields_vtk(grid:Grid,
+ cell_fields = None,
+ point_fields = None,
+ vtk_path: pathlib.Path=None):
+ """
+ Output given cell and point fields to VTK.
+ Return: the pv grid object with the cell and point data arrays
+ """
+ x, y, z = np.meshgrid(*grid.axes_linspace(), indexing='ij')
+ pv_grid = pv.StructuredGrid(x, y, z)
+ if cell_fields is not None:
+ for k, v in cell_fields.items():
+ if pv_grid.GetNumberOfCells() != v.shape[0]:
+ raise ValueError(f"Cell field size {v.shape[0]} mismatch number of cells {pv_grid.GetNumberOfCells()}")
+ pv_grid.cell_data[k] = v
+ if point_fields is not None:
+ for k, v in point_fields.items():
+ if pv_grid.GetNumberOfPoints() != v.shape[0]:
+ raise ValueError(f"Point field size {v.shape[0]} mismatch number of points {pv_grid.GetNumberOfPoints()}")
+ pv_grid.point_data[k] = v
+ if vtk_path is not None:
+ pv_grid.save(str(vtk_path))
+ return pv_grid
+
+def create_plotter(**options):
+ #pv.start_xvfb()
+ font_size = 20
+ #pv.global_theme.font.size = font_size
+ plotter = pv.Plotter(**options)
+ # Add axes and bounding box for context
+ plotter.add_axes()
+ plotter.show_grid()
+ plotter.add_bounding_box()
+ return plotter
+#
+# def pv_plot_mesh(pv_grid, color='grey', opacity=1.0, plotter = None):
+# """
+# Usage:
+# plotter = pv_plot_mesh(mesh_one)
+# plotter = pv_plot_mesh(mesh_two, plotter=plotter)
+# plotter.show()
+# """
+# if plotter is None:
+# pv.start_xvfb()
+# font_size = 20
+# pv.global_theme.font.size = font_size
+# plotter = pv.Plotter(off_screen=True, window_size=(1024, 768))
+# # Add axes and bounding box for context
+# plotter.add_axes()
+# plotter.show_grid()
+# plotter.add_bounding_box()
+#
+# #plotter.set_font(font_size=font_size)
+# plotter.add_mesh(pv_grid, color=color, opacity=opacity)
+#
+# return plotter
+
+
+def plot_grid(n):
+ """
+ Create
+ :param n:
+ :return:
+ """
+ # Create a PyVista mesh from the points
+ points = np.mgrid[:n, :n, :n] / (n - 1.0)
+ mesh = pv.StructuredGrid(*points[::-1])
+ points = points.reshape((3, -1))
+ return points, mesh
+
+def pv_plotter(meshes):
+ # Create a plotting object
+ p = pv.Plotter()
+
+ # Add axes and bounding box for context
+ p.add_axes()
+ p.show_grid()
+ p.add_bounding_box()
+
+ # Show the plot
+ p.show()
+
+
+def scatter_3d(mesh, values, n=5):
+ # Normalize the function values for use in scaling
+ scaled_values = (values - np.min(values)) / (np.max(values) - np.min(values))
+
+ mesh['scalars'] = scaled_values
+
+ # Create the glyphs: scale and color by the scalar values
+ geom = pv.Sphere(phi_resolution=8, theta_resolution=8)
+ glyphs = mesh.glyph(geom=geom, scale='scalars', factor=0.3)
+
+
+ # Add the glyphs to the plotter
+ p.add_mesh(glyphs, cmap='coolwarm', show_scalar_bar=True)
+
+
+
+def plot_fn_3d(fn, n=5):
+ points, mesh = plot_grid(n)
+ values = fn(*points[::-1])
+ scatter_3d(mesh, values)
+
+
+def f(x, y, z):
+ return x * (1 - y) * z * (1 - z) * 4
+
+
+#plot_fn_3d(f)
\ No newline at end of file
diff --git a/src/bgem/upscale/fields.py b/src/bgem/upscale/fields.py
new file mode 100644
index 0000000..e664974
--- /dev/null
+++ b/src/bgem/upscale/fields.py
@@ -0,0 +1,80 @@
+"""
+Functions for construction of structured media fields.
+"""
+import numpy as np
+
+voigt_coords = {
+ 1: [(0, 0)],
+ 2: [(0, 0), (1, 1), (0, 1)],
+ 3: [(0, 0), (1, 1), (2, 2), (1, 2), (0, 2), (0, 1)]
+ }
+
+idx_to_full = {
+ 1: [(0, 0)],
+ 3: [[0, 2], [2, 1]],
+ 6: [[0, 5, 4], [5, 1, 3], [4, 3, 2]]
+ }
+
+idx_to_voigt = {
+ 1: ([0], [0]),
+ 2: ([0, 1, 0], [0, 1, 1]),
+ 3: ([0, 1, 2, 1, 0, 0], [0, 1, 2, 2, 2, 1])
+}
+
+def voigt_to_tn(vec):
+ """
+ :param vec: (N, n_voigt)
+ :return: (N, dim, dim)
+ """
+ _, n_voigt = vec.shape
+ return vec[:, idx_to_full[n_voigt]]
+
+def tn_to_voigt(tn):
+ """
+ (N, dim, dim) -> (N, n_voight)
+ :param array:
+ :return:
+ """
+ _, dim, dim = tn.shape
+ return tn[:, idx_to_voigt[dim][0], idx_to_voigt[dim][1]]
+ # m, n, N = tn.shape
+ # assert m == n
+ # dim = m
+ # tn_voigt = [
+ # tn[i, j, :]
+ # for i, j in voigt_coords[dim]
+ # ]
+ # return np.array(tn_voigt)
+
+def _tn(k, dim):
+ if type(k) == float:
+ k = k * np.eye(dim)
+ assert k.shape == (dim, dim)
+ return k
+def K_structured(points, K0, Kx=None, fx=2.0, Ky=None, fy=4.0, Q=None):
+ """
+
+ :param points:
+ :param K0:
+ :param Kx:
+ :param fx:
+ :param Ky:
+ :param fy:
+ :param Q:
+ :return: (N, n_voigt)
+ """
+ x = points # shape: (N, dim)
+ _, dim = x.shape
+ K0 = _tn(K0, dim)
+ if Kx is None:
+ Kx = K0
+ if Ky is None:
+ Ky = Kx
+ Kx = _tn(Kx, dim)
+ Ky = _tn(Ky, dim)
+ t = 0.5 * (np.sin(2 * np.pi * fx * x[:, 0]) + 1)[:, None, None]
+ s = 0.5 * (np.sin(2 * np.pi * fy * x[:, 1]) + 1)[:, None, None]
+ K = t * K0 + (1 - t) * (s * Kx + (1 - s) * Ky)
+ if Q is not None:
+ K = Q.T @ K @ Q
+ return tn_to_voigt(K)
\ No newline at end of file
diff --git a/src/bgem/upscale/homogenization.py b/src/bgem/upscale/homogenization.py
new file mode 100644
index 0000000..59a317a
--- /dev/null
+++ b/src/bgem/upscale/homogenization.py
@@ -0,0 +1,82 @@
+import numpy as np
+import logging
+
+
+def equivalent_scalar(loads, responses):
+ assert loads.shape[1] == responses.shape[1] == 1
+ return np.dot(loads[:, 0], responses[:, 0]) / np.dot(loads[:, 0], loads[:, 0])
+
+def equivalent_sym_tensor_3d(loads, responses):
+ """
+ :param loads: array, (N, dim), e.g grad pressure grad_p(x)
+ :param responses: (N, dim), e.g. Darcian velocity -v(x) = K(x) grad_p(x)
+ :return:
+ """
+ # from LS problem for 6 unknowns in Voigt notation: X, YY, ZZ, YZ, XZ, XY
+ # the matrix has three blocks for Vx, Vy, Vz component of the responses
+ # each block has different sparsity pattern
+ n_loads = loads.shape[0]
+ zeros = np.zeros(n_loads)
+ ls_mat_vx = np.stack([loads[:, 0], zeros, zeros, zeros, loads[:, 2], loads[:, 1]], axis=1)
+ rhs_vx = responses[:, 0]
+ ls_mat_vy = np.stack([zeros, loads[:, 1], zeros, loads[:, 2], zeros, loads[:, 0]], axis=1)
+ rhs_vy = responses[:, 1]
+ ls_mat_vz = np.stack([zeros, zeros, loads[:, 2], loads[:, 1], loads[:, 0], zeros], axis=1)
+ rhs_vz = responses[:, 2]
+ ls_mat = np.concatenate([ls_mat_vx, ls_mat_vy, ls_mat_vz], axis=0)
+ rhs = np.concatenate([rhs_vx, rhs_vy, rhs_vz], axis=0)
+ assert ls_mat.shape == (3 * n_loads, 6)
+ assert rhs.shape == (3 * n_loads,)
+ result = np.linalg.lstsq(ls_mat, rhs, rcond=None)
+ cond_tn_voigt, residuals, rank, singulars = result
+ condition_number = singulars[0] / singulars[-1]
+ if condition_number > 1e3:
+ logging.warning(f"Badly conditioned inversion. Residual: {residuals}, max/min sing. : {condition_number}")
+ return cond_tn_voigt
+
+
+def equivalent_sym_tensor_2d(loads, responses):
+ """
+ :param loads: array, (N, dim), e.g grad pressure grad_p(x)
+ :param responses: (N, dim), e.g. Darcian velocity -v(x) = K(x) grad_p(x)
+ :return:
+ """
+ # from LS problem for 6 unknowns in Voigt notation: X, YY, ZZ, YZ, XZ, XY
+ # the matrix has three blocks for Vx, Vy, Vz component of the responses
+ # each block has different sparsity pattern
+ n_loads = loads.shape[0]
+ zeros = np.zeros(n_loads)
+ ls_mat_vx = np.stack([loads[:, 0], zeros, loads[:, 1]], axis=1)
+ rhs_vx = responses[:, 0]
+ ls_mat_vy = np.stack([zeros, loads[:, 1], loads[:, 0]], axis=1)
+ rhs_vy = responses[:, 1]
+ ls_mat = np.concatenate([ls_mat_vx, ls_mat_vy], axis=0)
+ rhs = np.concatenate([rhs_vx, rhs_vy], axis=0)
+ assert ls_mat.shape == (2 * n_loads, 3)
+ assert rhs.shape == (2 * n_loads,)
+ result = np.linalg.lstsq(ls_mat, rhs, rcond=None)
+ cond_tn_voigt, residuals, rank, singulars = result
+ condition_number = singulars[0] / singulars[-1]
+ if condition_number > 1e3:
+ logging.warning(f"Badly conditioned inversion. Residual: {residuals}, max/min sing. : {condition_number}")
+ return cond_tn_voigt
+
+
+_equivalent_sym_tensor = {
+ 1: equivalent_scalar,
+ 2: equivalent_sym_tensor_2d,
+ 3: equivalent_sym_tensor_3d
+}
+
+
+
+def equivalent_posdef_tensor(loads, responses):
+ # tensor pos. def. <=> load . response > 0
+ # ... we possibly modify responses to satisfy
+ dim = loads.shape[0]
+ assert dim == responses.shape[0]
+ unit_loads = loads / np.linalg.norm(loads, axis=1)[:, None]
+ load_components = np.sum(responses * unit_loads, axis=1)
+ responses_fixed = responses + (np.maximum(0, load_components) - load_components)[:, None] * unit_loads
+
+ return _equivalent_sym_tensor[dim](loads, responses_fixed)
diff --git a/src/bgem/upscale/voxelize.py b/src/bgem/upscale/voxelize.py
new file mode 100644
index 0000000..f071cfe
--- /dev/null
+++ b/src/bgem/upscale/voxelize.py
@@ -0,0 +1,894 @@
+from typing import *
+from pathlib import Path
+import csv
+import itertools
+import math
+
+import attrs
+from functools import cached_property
+from bgem.stochastic import Fracture
+
+from bgem.core import array_attr
+from bgem.upscale import Grid
+from bgem.stochastic import FractureSet, EllipseShape, PolygonShape
+
+import numpy as np
+from scipy import interpolate
+"""
+Voxelization of fracture network.
+Task description:
+Input: List[Fracutres], DFN sample, fractures just as geometric objects.
+Output: Intersection arrays: cell_idx, fracture_id x, intersection volume estimate
+(taking fr aperture and intersectoin area into account)
+That could be rather encoded into a sparse matrix or two for interpolation
+of bulk and fracture values to the field on the target domain.
+While the separate matrix for bulk and for fracture values may be better in some cases,
+all use cases are covered by a single interpolation matrix multiplying a vector composed
+of both fracture and bulk values.
+Covered cases:
+- input fields on a VTK/GMSH mesh (suitable internal format for the mesh ieadlly separate array for triangles, and array for tetrahedras
+- bulk on separate grid, fracture fields constant on fractures or their fragments
+- bulk on the trager grid, ...
+- bulk on any points first interpolated to the target grid
+
+TODO:
+1. More restricted API.
+ a) function to project bulk fiedls between grids, first implement using SCIPY interpolation.
+ b) computing intersection matrices to comibne bulk field on the target grid and fracture field values
+ into the traget grid field.
+ c) function to apply intersection object to particular bulk and fracture fields (support of fields of arbitrary shape: scalar, vector, tensor valued fields)
+
+2. Future API using connected bulk and fracture field vectors and common sparse matrix.
+ Desing after experience with restriced API.
+
+Possible intersection approaches:
+
+- for each fracture -> AABB -> loop over its cells -> intersection test -> area calculation
+- Monte Carlo : random points on each fracture (N ~ r**2), count point numbers in each cell, weights -> area/volume estimate
+"""
+
+
+
+"""
+TODO:
+1. 1. AABB grid -> centers of cells within AABB of fracture
+ 2. Fast selection of active cells: centers_distance < tol
+ 3. For active cells detect intersection with plane.
+ - active cells corners -> projection to fr plane
+ - detect nodes in ellipse, local system
+ - alternative function to detect nodes within n-polygon
+ 4. output: tripples: i_fracture, i_cell, cell_distance for each intersection
+ to get consistent conductivity along voxelized fracture, we must modify cells within normal*grid.step band
+ 5. rasterized full tenzor:
+ - add to all interacting cells
+ - add multiplied by distance dependent coefficient
+
+
+2. Direct homogenization test, with at least 2 cells across fracture.
+ Test flow with rasterized fractures, compare different homogenization routines
+ ENOUGH FOR SURAO
+
+3. For cells in AABB compute distance in parallel, simple fabric tenzor homogenization.
+ Possibly faster due to vectorization, possibly more precise for thin fractures.
+4. Comparison of conservative homo + direct upscaling on fine grid with fabric homogenization.
+
+5. Improved determination of candidate cells and distance, differential algorithm.
+
+"""
+
+def base_shape_interior_grid(shape, step:float) -> np.ndarray:
+ """
+ Return points of a grid that are inside the reference shape.
+ The grid is with `step` resolution extending from origin
+ in the XY reference plane of the shape.
+ Return shape (N, 2)
+ """
+ aabb = shape.aabb
+ points = Grid.from_step(aabb[1] - aabb[0], step, origin=aabb[0]).barycenters()
+ are_inside = shape.are_points_inside(points)
+ selected_pts = points[are_inside]
+ return selected_pts
+
+
+@attrs.define
+class FracturedDomain:
+ """
+ Structured bulk grid + unstructured fracture set.
+ The input of the voxelization procedure.
+
+ Specification of the fracture - target grid geometry.
+ This is in priciple enough to construct basic voxelization case.
+ other cases may need information about source grid/mesh.
+ """
+ dfn: FractureSet #
+ fr_cross_section: np.ndarray # cross_sections of fractures
+ grid: Grid # target homogenization grid
+
+
+
+@attrs.define
+class Intersection:
+ """
+ Intersection of fractures with the grid.
+ That is a sparse matrix for contribution of the fractures,
+ 1 - rowsum is scaling factor of the underlaied bulk array.
+
+ First we will proceed with this design moving to actual sparse matrix implementation later on.
+ The interpolation would be:
+
+ result_grid_field[i_cell] = (1-rowsum[i_cell])/cell_volume[i_cell] * bulk_grid_field[i_cell]
+ + volume[k] * (cells[k] == i_cell) * (fracture[k] == i_fr) * fr_field[i_fr]
+ The sparse
+ TODO: refine design, try to use sparse matrix for interpolation of a connected bulk-fracture values vector
+ """
+ domain = attrs.field(type=FracturedDomain) # The source object.
+ i_cell = array_attr(shape=(-1,), dtype=int) # sparse matrix rows, cell idx of intersection
+ i_fr = array_attr(shape=(-1,), dtype=int) # sparse matrix columns
+ isec = array_attr(shape=(-1,), dtype=float) # effective volume of the intersection
+ #bulk_scale: np.ndarray #
+ # used to scale bulk field
+
+ @classmethod
+ def const_isec(cls, domain, i_cell, i_fr, isec):
+ assert len(i_cell) == len(i_fr)
+ isec = np.broadcast_to([isec], (len(i_cell),))
+ return cls(domain, i_cell, i_fr, isec)
+
+ @property
+ def grid(self):
+ return self.domain.grid
+
+ @cached_property
+ def bulk_scale(self):
+ scale = np.ones(self.grid.n_elements, dtype=float)
+ scale[self.i_cell[:]] -= self.isec[:] # !! have to broadcast isec, use convertor from fr_set
+ return scale
+
+ def cell_field(self):
+ field = np.zeros(self.grid.n_elements)
+ field[self.i_cell] = 1.0
+ return field
+
+ def count_fr_cells(self):
+ """
+ Array of count of intersecting cells for every fracture.
+ :return:
+ """
+ fr_counts = np.zeros(len(self.domain.dfn))
+ unique_values, counts = np.unique(self.i_fr, return_counts=True)
+ fr_counts[unique_values] = counts
+ return fr_counts
+
+ def interpolate(self, bulk_field, fr_field, source_grid=None):
+ """
+ Rasterize bulk and fracture fields to the target grid, i.e. self.grid.
+ If source_Grid is given the bulk filed is first resampled to the target grid
+ using linear interpolation and scipy.
+
+
+ :param bulk_field:
+ :param fr_field:
+ :param source_grid:
+ :return:
+ """
+ # if source_grid is not None:
+ # assert np.allclose(np.array(source_grid.origin), np.array(self.grid.origin))
+ # assert np.allclose(np.array(source_grid.dimensions), np.array(self.grid.dimensions))
+ # grid_points = source_grid.axes_cell_coords()
+ # target_points = self.grid.barycenters()
+ # # !! Interpolation problem, we have piecewise values at input, but want to interpolate them linearly to th eoutput grid
+ # # finner output grid points are out of the range of the input grid.
+ # bulk_field = interpolate.interpn(grid_points,
+ # bulk_field.reshape(*source_grid.shape, *bulk_field.shape[1:]),
+ # target_points, method='linear')
+
+ assert len(bulk_field) == self.domain.grid.n_elements
+ assert len(fr_field) == len(self.domain.dfn)
+ len_value_shape = len(bulk_field.shape) - 1
+ scalar_shape = (-1, *(len_value_shape*[1]))
+ combined = bulk_field * self.bulk_scale.reshape(scalar_shape)
+ combined[self.i_cell[:]] += self.isec[:].reshape(scalar_shape) * fr_field[self.i_fr[:]]
+ return combined
+
+ def fr_tensor_2(self, fr_cond_scalar):
+ """
+
+ :return:
+ """
+ dfn = self.domain.dfn
+ #normal_axis_step = grid_step[np.argmax(np.abs(n))]
+ return fr_cond_scalar[:, None, None] * (np.eye(3) - dfn.normal[:, :, None] * dfn.normal[:, None, :]) #/ normal_axis_step
+
+ def perm_aniso_fr_values(fractures, fr_transmisivity: np.array, grid_step) -> np.ndarray:
+ '''Calculate anisotrop
+ assert source_grid.origin == self.originic permeability tensor for each cell of ECPM
+ intersected by one or more fractures. Discard off-diagonal components
+ of the tensor. Assign background permeability to cells not intersected
+ by fractures.
+ Return numpy array of anisotropic permeability (3 components) for each
+ cell in the ECPM.
+
+ fracture = numpy array containing number of fractures in each cell, list of fracture numbers in each cell
+ ellipses = [{}] containing normal and translation vectors for each fracture
+ T = [] containing intrinsic transmissivity for each fracture
+ d = length of cell sides
+ k_background = float background permeability for cells with no fractures in them
+ '''
+ assert len(fractures) == len(fr_transmisivity)
+ # Construc array of fracture tensors
+ def full_tensor(n, fr_cond):
+ normal = np.array(n)
+ normal_axis_step = grid_step[np.argmax(np.abs(n))]
+ return fr_cond * (np.eye(3) - normal[:, None] * normal[None, :]) / normal_axis_step
+
+ return np.array([full_tensor(fr.normal, fr_cond) for fr, fr_cond in zip(fractures, fr_transmisivity)])
+
+
+ def perm_iso_fr_values(fractures, fr_transmisivity: np.array, grid_step) -> np.ndarray:
+ '''Calculate isotropic permeability for each cell of ECPM intersected by
+ one or more fractures. Sums fracture transmissivities and divides by
+ cell length (d) to calculate cell permeability.
+ Assign background permeability to cells not intersected by fractures.
+ Returns numpy array of isotropic permeability for each cell in the ECPM.
+
+ fracture = numpy array containing number of fractures in each cell, list of fracture numbers in each cell
+ T = [] containing intrinsic transmissivity for each fracture
+ d = length of cell sides
+ k_background = float background permeability for cells with no fractures in them
+ '''
+ assert len(fractures) == len(fr_transmisivity)
+ fr_norm = np.array([fr.normal for fr in fractures])
+ normalised_transmissivity = fr_transmisivity / grid_step[np.argmax(np.abs(fr_norm), axis=1)]
+ return normalised_transmissivity
+
+
+def intersection_decovalex(dfn:FractureSet, grid: Grid) -> 'Intersection':
+ """
+ Based on DFN map / decovalex 2023 approach. Support for different fracture shapes,
+ vectorization.
+ Steps:
+ 1. for fractures compute arrays that could be computed by vector operations:
+ - fracture normal
+ - fracture transform matrix
+ - fracture angle (??)
+ - bounding box (depend on shape)
+ 2. estimate set of candidate cells:
+ - bounding box
+ - future: axis closses to normal for each point in AABB projection
+ determine fracture intersection (regular pattern), add 4/8 neighbor cells
+ Only do for larger fractures
+ 3. project nodes of candidate cells, need node_i_coord to i_node map,
+ use 2D matrix of the AABB projection
+ 4. Fast identification of celles within distance range from the fracture
+ 5. Shape matching of the cells.
+ 6. cell to fracture distance estimate
+ what could be computed in vector fassion
+ 2. for each fracture determine cell centers close enough
+ 3. compute XY local coords and if in the Shape
+
+ :param domain:
+ :return:
+ """
+ """
+ Estimate intersections between grid cells and fractures
+
+ Temporary interface to original map_dfn code inorder to perform one to one test.
+ """
+ import bgem.upscale.decovalex_dfnmap as dmap
+ assert dfn.base_shape_idx == EllipseShape.id
+
+ domain = FracturedDomain(dfn, np.ones(len(dfn)), grid)
+ ellipses = [dmap.Ellipse(fr.normal, fr.center, fr.scale*fr.shape.R) for fr in dfn]
+ d_grid = dmap.Grid.make_grid(domain.grid.origin, domain.grid.step, grid.dimensions)
+ d_fractures = dmap.map_dfn(d_grid, ellipses)
+ i_pairs = [(i_c, i_f) for i_f, fr in enumerate(d_fractures) for i_c in fr.cells]
+ if i_pairs:
+ i_cell, i_fr = zip(*i_pairs)
+ else:
+ i_cell = []
+ i_fr = []
+ # fr, cell = zip([(i_fr, i_cell) for i_fr, fr in enumerate(fractures) for i_cell in fr.cells])
+ return Intersection.const_isec(domain, i_cell, i_fr, 1.0)
+
+
+__rel_corner = np.array([[0, 0, 0], [1, 0, 0],
+ [1, 1, 0], [0, 1, 0],
+ [0, 0, 1], [1, 0, 1],
+ [1, 1, 1], [0, 1, 1]])
+
+def intersect_cell(loc_corners: np.array, shape) -> bool:
+ """
+ loc_corners - shape (3, 8)
+ """
+ # check if cell center is inside radius of fracture
+ center = np.mean(loc_corners, axis=1)
+ if not shape.is_point_inside(*center[:2]):
+ return False
+
+ # cell center is in ellipse
+ # find z of cell corners in xyz of fracture
+
+ if np.min(loc_corners[2, :]) >= 0. or np.max(loc_corners[2, :]) < 0.: # fracture lies in z=0 plane
+ # fracture intersects that cell
+ return False
+
+ return True
+
+def intersection_cell_corners(dfn:FractureSet, grid: Grid) -> 'Intersection':
+ domain = FracturedDomain(dfn, np.ones(len(dfn)), grid)
+
+ i_cell = []
+ i_fr = []
+ for i in range(len(dfn)):
+ i_box_min, i_box_max = grid.coord_aabb(dfn.AABB[i])
+ axis_ranges = [range(max(0, a), min(b, n)) for a, b, n in zip(i_box_min, i_box_max, grid.shape)]
+
+ grid_cumul_prod = np.array([1, grid.shape[0], grid.shape[0] * grid.shape[1]])
+ # X fastest running
+ for kji in itertools.product(*reversed(axis_ranges)):
+ # make X the first coordinate
+ ijk = np.flip(np.array(kji))
+ corners = grid.origin[None, :] + (ijk[None, :] + __rel_corner[:, :]) * grid.step[None, :]
+ loc_corners = dfn.inv_transform_mat[i] @ (corners - dfn.center[i]).T
+ if intersect_cell(loc_corners, dfn.base_shape):
+ #logging.log(logging.DEBUG, f" cell {ijk}")
+ cell_index = ijk @ grid_cumul_prod
+ i_cell.append(cell_index)
+ i_fr.append(i)
+
+ return Intersection.const_isec(domain, i_cell, i_fr, 1.0)
+
+
+def intersection_interpolation(domain: FracturedDomain) -> 'Intersection':
+ """
+ Approximate fast intersection for small number of fractures.
+ 1. fractures are encoded as decreasing 2 powers: 2**(-i_fr), assume fractures sorted from large down
+ 2. place points on the fractures with their values
+ 3. project all points to ambient space by transform matrices (use advanced indexing)
+ 4. summ to the cells
+ 5. get few largest fractures in each cell
+ :param domain:
+ :return:
+ """
+def intersection_band_antialias(domain: FracturedDomain) -> 'Intersection':
+ """
+ This approach interprets fracutures as bands of given cross-section,
+ the interpolation is based on a fast approximation of the volume of the band-cell
+ intersection. The band-cell intersection candidates are determined by modified decovalex algorithm.
+ """
+ # logging.log(logging.INFO, f"Callculating Fracture - Cell intersections ...")
+ # dfn = domain.dfn
+ # grid = domain.grid
+ # for fr, aabb, trans_mat in zip(dfn, dfn.AABB, dfn.inv_transform_mat):
+ # min_corner_cell, max_corner_cell = grid.project_points(aabb.reshape(2, 3))
+ # axis_ranges = [range(max(0, a), min(b, n))
+ # for a, b, n in zip(min_corner_cell, max_corner_cell, grid.shape)]
+ # itertools.product(*reversed(axis_ranges))
+ # grid.
+ # return [fracture_for_ellipse(grid, ie, ellipse) for ie, ellipse in enumerate(ellipses)]
+ pass
+
+def fr_conductivity(dfn:FractureSet, cross_section_factor = 1e-4, perm_factor = 1.0 ):
+ """
+
+ :param dfn:
+ :param cross_section_factor: scalar = cross_section / fracture mean radius
+ :return:
+ """
+ rho = 1000
+ g = 9.81
+ viscosity = 8.9e-4
+ perm_to_cond = rho * g / viscosity
+ cross_section = cross_section_factor * np.sqrt(np.prod(dfn.radius, axis=1))
+ perm = perm_factor * cross_section * cross_section / 12
+ conductivity = perm_to_cond * perm
+ cond_tn = conductivity[:, None, None] * (np.eye(3) - dfn.normal[:, :, None] * dfn.normal[:, None, :])
+
+ return cross_section, cond_tn
+# ============ DEPRECATED
+
+@attrs.define
+class FracturedMedia:
+ """
+ Representation of the fractured media sample.
+ Geometry:
+ dfn + grid or dfn + arbitrary bulk points
+ Fields, should rather be separated for different type of quantities.
+ scalar (porosity): scalars (fields in future) on fractures, bulk scalar field on grid or at points
+ vector (velocity): vectors on fractures, vector bulk field
+ 2d-tensor (conductivity): tensors on fractures, tensor bulk field
+ scalars on fractures -> imply scalar * (n \otimes n) tensor
+ 4d-cauchy tensor: ?
+ 4d-dispersion tensor: ? it should describe second order, i.e. variance of the velocity field
+ Seems reasonable to assume that all quantities are homogenized as weighted avarages of fracture and bulk values.
+
+ 1. DFN imply a box
+ 2. If we add a grid step we can specify bulk values on that grid
+ 3. voxelization grid could be independent. Make interpolation in each axis independently.
+
+ Depricated design. We should separate interpolation matrix from the value arrays.
+ TODO: use FracturedMedia instead separate arrays.
+ """
+ dfn: FractureSet #
+ fr_cross_section: np.ndarray # shape (n_fractures,)
+ fr_conductivity: np.ndarray # shape (n_fractures,)
+ conductivity: float
+
+
+ @staticmethod
+ def fracture_cond_params(dfn :FractureSet, unit_cross_section, bulk_conductivity):
+ # unit_cross_section = 1e-4
+ viscosity = 1e-3
+ gravity_accel = 10
+ density = 1000
+ permeability_factor = 1 / 12
+ permeability_to_conductivity = gravity_accel * density / viscosity
+ # fr cond r=100 ~ 80
+ # fr cond r=10 ~ 0.8
+ fr_r = np.array([fr.r for fr in dfn])
+ fr_cross_section = unit_cross_section * fr_r
+ fr_cond = permeability_to_conductivity * permeability_factor * fr_r ** 2
+ fr_cond = np.full_like(fr_r, 10)
+ return FracturedMedia(dfn, fr_cross_section, fr_cond, bulk_conductivity)
+
+ @classmethod
+ def _read_dfn_file(cls, f_path):
+ with open(f_path, 'r') as file:
+ rdr = csv.reader(filter(lambda row: row[0] != '#', file), delimiter=' ', skipinitialspace=True)
+ return [row for row in rdr]
+
+ @classmethod
+ def from_dfn_works(cls, input_dir: Union[Path, str], bulk_conductivity):
+ '''
+ Read dfnWorks-Version2.0 output files:
+ normal_vectors.dat - three values per line, normal vectors
+ translations.dat - three values per line, fracture centers,
+ 'R' marks isolated fracture, currently ignored
+ radii.dat - three values per line: (major_r, minor_r, shape_family)
+ shape_family: -1 = RectangleShape, 0 = EllipseShape, >0 fracture family index
+ (unfortunate format as it mixes two different attributes ahape and fracture statistical family, which are independent)
+ perm.dat - 6 values per line; 4th is permitivity
+ aperture.dat - 4 values per line; 4th is apperture
+ polygons.dat - not used, DFN triangulation
+
+ :param source_dir: directory with the files
+ :param bulk_conductivity: background / bulk conductivity
+ (constant only)
+ :return: FracturedMedia
+ '''
+ __radiifile = 'radii.dat'
+ __normalfile = 'normal_vectors.dat'
+ __transfile = 'translations.dat'
+ __permfile = 'perm.dat'
+ __aperturefile = 'aperture.dat'
+ workdir = Path(input_dir)
+
+ radii = np.array(cls._read_dfn_file(workdir / __radiifile), dtype=float)
+ n_frac = radii.shape[0]
+ radii = radii[:, 0:2]
+ assert radii.shape[1] == 2
+ normals = np.array(cls._read_dfn_file(workdir / __normalfile), dtype=float)
+ assert normals.shape == (n_frac, 3)
+ translations = np.array([t for t in cls._read_dfn_file(workdir / __transfile) if t[-1] != 'R'], dtype=float)
+ assert translations.shape == (n_frac, 3)
+ # permeability = np.array(cls._read_dfn_file(workdir / __permfile), dtype=float)[:, 3]
+ # apperture = np.array(cls._read_dfn_file(workdir / __aperturefile), dtype=float)[:, 3]
+ shape_axis = np.repeat(n_frac, np.array([1, 0]), axis=0)
+ shape_idx = EllipseShape().id
+ dfn = FractureSet(shape_idx, radii, translations, normals, shape_axis)
+ return cls(dfn, None, None )
+
+
+
+
+def intersections_centers(grid: Grid, fractures: List[Fracture]):
+ """
+ Estimate intersections between grid cells and fractures
+
+ 1. for all fractures compute what could be computed in vector fashion
+ 2. for each fracture determine cell centers close enough
+ 3. compute XY local coords and if in the Shape
+ """
+ fr_normal = np.array([fr.normal for fr in fractures])
+ fr_center = np.array([fr.center for fr in fractures])
+ import decovalex_dfnmap as dmap
+
+ ellipses = [dmap.Ellipse(fr.normal, fr.center, fr.scale) for fr in fractures]
+ d_grid = dmap.Grid.make_grid(grid.origin, grid.step, grid.dimensions)
+ d_fractures = dmap.map_dfn(d_grid, ellipses)
+ i_fr_cell = np.stack([(i_fr, i_cell) for i_fr, fr in enumerate(d_fractures) for i_cell in fr.cells])
+ #fr, cell = zip([(i_fr, i_cell) for i_fr, fr in enumerate(fractures) for i_cell in fr.cells])
+ return Intersection(grid, fractures, i_fr_cell, None)
+
+
+def intersections_decovalex(grid: Grid, fractures: List[Fracture]):
+ """
+ Estimate intersections between grid cells and fractures
+
+ Temporary interface to original map_dfn code inorder to perform one to one test.
+ """
+ import decovalex_dfnmap as dmap
+
+ ellipses = [dmap.Ellipse(fr.normal, fr.center, fr.scale) for fr in fractures]
+ d_grid = dmap.Grid.make_grid(grid.origin, grid.step, grid.dimensions)
+ d_fractures = dmap.map_dfn(d_grid, ellipses)
+ i_fr_cell = np.stack([(i_fr, i_cell) for i_fr, fr in enumerate(d_fractures) for i_cell in fr.cells])
+ #fr, cell = zip([(i_fr, i_cell) for i_fr, fr in enumerate(fractures) for i_cell in fr.cells])
+ return Intersection(grid, fractures, i_fr_cell, None)
+
+def perm_aniso_fr_values(fractures, fr_transmisivity: np.array, grid_step) -> np.ndarray:
+ '''Calculate anisotropic permeability tensor for each cell of ECPM
+ intersected by one or more fractures. Discard off-diagonal components
+ of the tensor. Assign background permeability to cells not intersected
+ by fractures.
+ Return numpy array of anisotropic permeability (3 components) for each
+ cell in the ECPM.
+
+ fracture = numpy array containing number of fractures in each cell, list of fracture numbers in each cell
+ ellipses = [{}] containing normal and translation vectors for each fracture
+ T = [] containing intrinsic transmissivity for each fracture
+ d = length of cell sides
+ k_background = float background permeability for cells with no fractures in them
+ '''
+ assert len(fractures) == len(fr_transmisivity)
+ # Construc array of fracture tensors
+ def full_tensor(n, fr_cond):
+ normal = np.array(n)
+ normal_axis_step = grid_step[np.argmax(np.abs(n))]
+ return fr_cond * (np.eye(3) - normal[:, None] * normal[None, :]) / normal_axis_step
+
+ return np.array([full_tensor(fr.normal, fr_cond) for fr, fr_cond in zip(fractures, fr_transmisivity)])
+
+
+def perm_iso_fr_values(fractures, fr_transmisivity: np.array, grid_step) -> np.ndarray:
+ '''Calculate isotropic permeability for each cell of ECPM intersected by
+ one or more fractures. Sums fracture transmissivities and divides by
+ cell length (d) to calculate cell permeability.
+ Assign background permeability to cells not intersected by fractures.
+ Returns numpy array of isotropic permeability for each cell in the ECPM.
+
+ fracture = numpy array containing number of fractures in each cell, list of fracture numbers in each cell
+ T = [] containing intrinsic transmissivity for each fracture
+ d = length of cell sides
+ k_background = float background permeability for cells with no fractures in them
+ '''
+ assert len(fractures) == len(fr_transmisivity)
+ fr_norm = np.array([fr.normal for fr in fractures])
+ normalised_transmissivity = fr_transmisivity / grid_step[np.argmax(np.abs(fr_norm), axis=1)]
+ return normalised_transmissivity
+
+def _conductivity_decovalex(fr_media: FracturedMedia, grid: Grid, fr_values_fn):
+ isec = intersections_decovalex(grid, fr_media.dfn)
+ fr_transmissivity = fr_media.fr_conductivity * fr_media.fr_cross_section
+ fr_values = fr_values_fn(isec.fractures, fr_transmissivity, isec.grid.step)
+ # accumulate tensors in cells
+ ncells = isec.grid.n_elements
+ k_aniso = np.full((ncells, *fr_values.shape[1:]), fr_media.conductivity, dtype=np.float64)
+ np.add.at(k_aniso, isec.i_fr_cell[:,1], fr_values[isec.i_fr_cell[:,0]])
+ return k_aniso #arange_for_hdf5(grid, k_iso).flatten()
+
+def permeability_aniso_decovalex(fr_media: FracturedMedia, grid: Grid):
+ return _conductivity_decovalex(fr_media, grid, perm_aniso_fr_values)
+
+def permeability_iso_decovalex(fr_media: FracturedMedia, grid: Grid):
+ return _conductivity_decovalex(fr_media, grid, perm_iso_fr_values)
+
+
+
+
+
+def aniso_lump(tn_array):
+ """
+ Convert array of full anisotropic tensors to the array of diagonal
+ tensors by lumping (summing) tensor rows to the diagonal.
+ :param tn_array: shape (n, k, k)
+ """
+ assert len(tn_array.shape) == 3
+ assert tn_array.shape[1] == tn_array.shape[2]
+ return np.sum(tn_array, axis=-1)[:, None, :] * np.eye(3)
+
+def aniso_diag(tn_array):
+ """
+ Convert array of full anisotropic tensors to the array of diagonal
+ tensors by extraction only diagonal elements.
+ :param tn_array: shape (n, k, k)
+ """
+ assert len(tn_array.shape) == 3
+ assert tn_array.shape[1] == tn_array.shape[2]
+ return tn_array * np.eye(3)[None, :, :]
+
+
+
+
+@attrs.define
+class FractureVoxelize:
+ """
+ Auxiliary class with intersection of fractures with a (structured, rectangular) grid.
+ The class itslef could be used for any types of elements, but the supported voxelization algorithms
+ are specific for the uniform rectangular grid, allowing different step for each of X, Y, Z directions.
+
+ The intersections could be understood as a sparse matrix for computing cell scalar property as:
+ i - grid index, j - fracture index
+ grid_property[i] = (1 - sum_j intersection[i, j]) * bulk_property[i] + sum_j intersection[i, j] * fr_property[j]
+
+ The sparse matrix 'intersection' is formed in terms of the triplex lists: cell_id, fracture_id, volume.
+ It actualy is intersection_volume[i,j] / cell_volume[i] , the cell_volume is minimum of the volume of the i-th cell
+ and sum of volumes of the intersectiong fracutres.
+
+ The cached properties for the bulk weight vector and fracture interpolation sparse matrix for efficient multiplication
+ are provided.
+ DEPRECATED DESIGN.
+ - The interpolation should be provided by the sparse interpolation matrix
+ - Input of the interpolation shuld be a connected vector of both bulk and fracture values
+ """
+ grid: 'Grid' # Any grid composed of numbered cells.
+ cell_ids: List[int] # For each intersection the cell id.
+ fr_ids: List[int] # For each intersection the fracture id.
+ volume: List[float] # For each intersection the intersection fracture volume estimate.
+
+
+
+ # @cached_property
+ # def cell_fr_sums(self):
+ # cell_sums = np.zeros(, dtype=np.float64)
+ #
+
+ def project_property(self, fr_property, bulk_property):
+ pass
+
+class FractureBoundaries3d:
+ @staticmethod
+ def build(polygons):
+ n_fractures, n_points, dim = polygons
+ assert dim == 3
+ assert n_points % 2 == 0
+
+ # Get AABB and sort coordinates from largest to smallest
+ aabb_min = polygons.min(axis=1)
+ aabb_max = polygons.max(axis=1)
+ aabb_ptp = aabb_max - aabb_min
+ axes_sort = np.argsort(-aabb_ptp, axis=1)
+ aabb_min_sort = aabb_min[:, axes_sort]
+ aabb_max_sort = aabb_max[:, axes_sort]
+ polygons_sort = polygons[:, :, axes_sort]
+ # for evary fracture get sequence of points from >=X_min to (aabb_min_sort[:, 1] + aabb_min_sort[:, 1]) / 2
+
+ # half of points + 1 to get the end point as well.
+ # We get other half by central symmetry.
+ selected_indices = (argmin_X[:, None] + np.arange(n_points // 2 + 1)[None, :]) % n_points
+
+ o_grid = np.ogrid[:n_fractures, :3]
+ all_fractures = np.arange(n_fractures)[:, None, None]
+ all_dims = np.arange(3)[None, None, :]
+ half_arc = polygons[all_fractures, selected_indices[:, :, None], all_dims]
+ """
+ 1. Use half arc to generate Y ranges in the X range.
+ This produces variable size arrays and could not be implemented in Numpy efficeintly.
+ Use classical loop over fractures and over lines. Generate list of XY celles, compute estimate of XY projection,
+ interior cells and list of boundary cells.
+ Interior cells - use normal, Z distance from center, and fracture aperture
+ to determine tensor contribution, multiply by XY projection for the boundary cells.
+ """
+
+
+def form_table():
+ pass
+
+
+def unit_area_tab(x, y, z_slack):
+ """
+ Assume 1 > x > y > 0.
+ 1 > z_slack > 0
+ :return: approx area of intersection of fracture plane in distance z_slack from origin
+
+ """
+
+
+def tensor_contribution(normal, slack, slack_axis, aperture):
+ """
+ Compute contribution to the cell equivalent/fabric tensor.
+ We assume aperture et most 1/10 of min cell dimension.
+
+ normal - fracture normal vector
+ slack - vector from cel center to fracture with single nonzero component, the minimum one.
+ should be relatively close to normal (up to orientation)
+ angle to normal on unit cube at most 50degs
+ aperture of the fracture
+ :return: 3d fabric tensor
+
+ 1. scale to unit cell
+ 2. approx surface of intersection on unit cell
+ 3. scale surface back
+ 3. tn = surf * apperture / 1 * (n otimes n)
+ 4. scale back
+
+ ===
+ - We will scale whole coord system to have unit cells, possibly scaling back individual equivalent tensors
+ or fracture eq. tensors.
+ This will guarantee that slack direction is that max. component of the normal.
+ """
+ normal_reminder = np.abs(np.delete(normal, slack_axis)) / normal[slack_axis]
+ normal_rel_max = np.max(normal_reminder)
+ normal_rel_min = np.min(normal_reminder)
+ area = unit_area_tab(normal_rel_max, normal_rel_min, slack)
+ rel_area = aperture * area / np.dot(normal, cell)
+ tn = rel_area * normal[:, None] * normal[None, :]
+ return tn
+
+
+
+
+# =================
+# Main interface functions
+# Usage example:
+# bulk_grid ... grid of the input bulk values
+# homo_grid ... output grid of homogenization
+# bulk_on_homo_field = bulk_interpolate(bulk_geometry, bulk_field, homo_grid)
+# # bulk_geometry is either grid or array of 3d points where bulk_field is given
+# A, B = voxelize_xyz(dfn, homo_grid)
+# homo_field = A * bulk_on_homo_field + B @ fracture_field
+# # or
+# homo_field = A * bulk_on_homo_field + B @ normal_field(dfn, scalar_fracture_field)
+# or a function
+# homogenize(voxel_obj(dfn, A, B, homo_grid), bulk_on_homo_field, scalar_fracture_field)
+# =================
+
+@attrs.define
+class Homogenize:
+ """
+ Class representing intersection of the fractures with a regular grid,
+ capable to perform avarage homogenization of individual fields.
+ dfn: FractureSet, ):
+
+ Several methods supported:
+ - source mixed mesh -> regular gird, using projection of Guass points tot he regular grid,
+ adaptive refinement relative to the target mesh
+ - decovalex voxelization for bulk on a grid pre´fractures any
+ - eter meash
+
+ """
+ domain: FracturedDomain
+ bulk_scaling: np.ndarray # (N_homo_grid_cells, field_shape)
+ fracture_interpolation: Any # sparse matrix (N_homo_grid_cells, n_fractures, field_shape)
+
+ @staticmethod
+ def mesh(mesh_path, grid: Grid):
+ grid_cell_volume = np.prod(grid.step) / 27
+
+ ref_el_2d = np.array([(0, 0), (1, 0), (0, 1)])
+ ref_el_3d = np.array([(0, 0, 0), (1, 0, 0), (0, 1, 0), (0, 0, 1)])
+
+
+ pvd_content = pv.get_reader(flow_out.hydro.spatial_file.path)
+ pvd_content.set_active_time_point(0)
+ dataset = pvd_content.read()[0] # Take first block of the Multiblock dataset
+
+ velocities = dataset.cell_data['velocity_p0']
+ cross_section = dataset.cell_data['cross_section']
+
+ p_dataset = dataset.cell_data_to_point_data()
+ p_dataset.point_data['velocity_magnitude'] = np.linalg.norm(p_dataset.point_data['velocity_p0'], axis=1)
+ plane = pv.Plane(center=(0, 0, 0), direction=(0, 0, 1))
+ cut_dataset = p_dataset.clip_surface(plane)
+
+ plotter = pv.Plotter()
+ plotter.add_mesh(p_dataset, color='white', opacity=0.3, label='Original Dataset')
+ plotter.add_mesh(cut_dataset, scalars='velocity_magnitude', cmap='viridis', label='Velocity Magnitude')
+
+ # Add legend and show the plot
+ plotter.add_scalar_bar(title='Velocity Magnitude')
+ plotter.add_legend()
+ plotter.show()
+
+ # num_cells = dataset.n_cells
+ # shifts = np.zeros((num_cells, 3))
+ # transform_matrices = np.zeros((num_cells, 3, 3))
+ # volumes = np.zeros(num_cells)
+
+ weights_sum = np.zeros((grid.n_elements,))
+ grid_velocities = np.zeros((grid.n_elements, 3))
+ levels = np.zeros(dataset.n_cells, dtype=np.int32)
+ # Loop through each cell
+ for i in range(dataset.n_cells):
+ cell = dataset.extract_cells(i)
+ points = cell.points
+
+ if len(points) < 3:
+ continue # Skip cells with less than 3 vertices
+
+ # Shift: the first vertex of the cell
+ shift = points[0]
+ # shifts[i] = shift
+
+ transform_matrix = points[1:] - shift
+ if len(points) == 4: # Tetrahedron
+ # For a tetrahedron, we use all three vectors formed from the first vertex
+ # transform_matrices[i] = transform_matrix[:3].T
+ # Volume calculation for a tetrahedron:
+ volume = np.abs(np.linalg.det(transform_matrix[:3])) / 6
+ ref_el = ref_el_3d
+ elif len(points) == 3: # Triangle
+ # For a triangle, we use only two vectors
+ # transform_matrices[i, :2] = transform_matrix.T
+ # Area calculation for a triangle:
+ volume = 0.5 * np.linalg.norm(np.cross(transform_matrix[0], transform_matrix[1])) * cross_section[i]
+ ref_el = ref_el_2d
+ level = max(int(np.log2(volume / grid_cell_volume) / 3.0), 0)
+ levels[i] = level
+ ref_barycenters = refine_barycenters(ref_el[None, :, :], level)
+ barycenters = shift[None, :] + ref_barycenters @ transform_matrix
+ grid_indices = grid.project_points(barycenters)
+ weights_sum[grid_indices] += volume
+ #
+ # grid_velocities[grid_indices] += volume * velocities[i]
+
+ values.extend(len(grid_indices)*[volume])
+ rows.extend(grid_indices)
+ cols.extend(len(grid_indices) * [i])
+ #print(np.bincount(levels))
+ #grid_velocities = grid_velocities / weights_sum[:, None]
+
+ values[:] /= weights_sum[rows[:]]
+
+ sp.csr_matrix((vals, (rows, cols)), shape=(grid.n_elements, dataset.n_cells))
+ return grid_velocities
+
+ # @staticmethod
+ # def (dfn: FractureSet, fr_cross_section: np.ndarray, grid: Grid):
+ # """
+ # Create the grid - fracture set intersection object using particular voxelization algorithm.
+ # :return:
+ # """
+ # return
+
+
+ def __call__(self, bulk_field, fracture_field):
+ assert bulk_field.shape[1:] == fracture_field.shape[1:]
+ field_shape = bulk_field.shape[1:]
+ assert bulk_field.shape[0] == self.domain.grid.n_elements
+ n_bulk = bulk_field.shape[0]
+ assert fracture_field.shape[0] == len(self.domain.dfn)
+ n_frac = fracture_field.shape[0]
+ bulk_f = bulk_field.reshape(n_bulk, -1)
+ frac_f = fracture_field.reshape(n_frac, -1)
+ result_f = self.bulk_scaling[:, None] * bulk_f + self.fracture_interpolation @ frac_f
+ return result_f.reshape(n_bulk, *field_shape)
+
+
+ def interpolate_grid(self, bulk_grid, bulk_field, fracture_field):
+ """
+ TODO: Interpolate bulk_filed given at bulk_grid to the domain.grid,
+ then return result of call: `self(interpolated_field, fracture_field)`
+
+ :param bulk_points: (n_points, 3)
+ :param bulk_field: (n_points, field_shape)
+ :param fracture_field:
+ :return:
+ """
+ pass
+
+ def interpolate_points(self, bulk_points, bulk_field, fracture_field):
+ """
+ TODO: Interpolate bulk_filed given at bulk_points to the domain.grid,
+ then return result of call: `self(interpolated_field, fracture_field)`
+
+ :param bulk_points: (n_points, 3)
+ :param bulk_field: (n_points, field_shape)
+ :param fracture_field:
+ :return:
+ """
+ pass
+
+def voxelize(dfn, bulk_grid):
+ """
+ Compute a sparse matrices for average homogenization:
+ homo_field = bulk_values_on_grid + A @ fracture_values
+ :param dfn:
+ :param bluk_grid:
+ :return:
+ """
+
+
diff --git a/tests/_pycharm_run/pytest fem.run.xml b/tests/_pycharm_run/pytest fem.run.xml
new file mode 100644
index 0000000..b66e7da
--- /dev/null
+++ b/tests/_pycharm_run/pytest fem.run.xml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index b60011d..ea205d3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -17,4 +17,5 @@
# img = compute_expensive_image()
# fn = tmpdir_factory.mktemp('data').join('img.png')
# img.save(str(fn))
-# return fn
\ No newline at end of file
+# return fn
+
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a291930..c4dd31d 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -6,6 +6,10 @@
from time import perf_counter
from contextlib import contextmanager
+from bgem import stochastic
+import numpy as np
+
+
def sandbox_fname(base_name, ext):
work_dir = "sandbox"
Path(work_dir).mkdir(parents=True, exist_ok=True)
@@ -37,3 +41,58 @@ def __repr__(self):
+
+
+fracture_stats = dict(
+ NS={'concentration': 17.8,
+ 'p_32': 0.094,
+ 'plunge': 1,
+ 'power': 2.5,
+ 'r_max': 564,
+ 'r_min': 0.038,
+ 'trend': 292},
+ NE={'concentration': 14.3,
+ 'p_32': 0.163,
+ 'plunge': 2,
+ 'power': 2.7,
+ 'r_max': 564,
+ 'r_min': 0.038,
+ 'trend': 326},
+ NW={'concentration': 12.9,
+ 'p_32': 0.098,
+ 'plunge': 6,
+ 'power': 3.1,
+ 'r_max': 564,
+ 'r_min': 0.038,
+ 'trend': 60},
+ EW={'concentration': 14.0,
+ 'p_32': 0.039,
+ 'plunge': 2,
+ 'power': 3.1,
+ 'r_max': 564,
+ 'r_min': 0.038,
+ 'trend': 15},
+ HZ={'concentration': 15.2,
+ 'p_32': 0.141,
+ 'power': 2.38,
+ 'r_max': 564,
+ 'r_min': 0.038,
+ #'trend': 5
+ #'plunge': 86,
+ 'strike': 95,
+ 'dip': 4
+ })
+
+def get_dfn_sample(box_size=100, seed=123):
+ # generate fracture set
+ np.random.seed(seed)
+ fracture_box = 3 * [box_size]
+ # volume = np.product()
+ pop = stochastic.Population.from_cfg(fracture_stats, fracture_box)
+ # pop.initialize()
+ pop = pop.set_range_from_size(sample_size=30)
+ mean_size = pop.mean_size()
+ print("total mean size: ", mean_size)
+ pos_gen = stochastic.UniformBoxPosition(fracture_box)
+ fractures = pop.sample(pos_distr=pos_gen, keep_nonempty=True)
+ return fractures
\ No newline at end of file
diff --git a/tests/gmsh/complex/test_heal_complex.py b/tests/gmsh/complex/test_heal_complex.py
index 367f66e..b98ff5f 100644
--- a/tests/gmsh/complex/test_heal_complex.py
+++ b/tests/gmsh/complex/test_heal_complex.py
@@ -21,6 +21,6 @@ def test_on_mesh_samples(mesh, tol):
hm = heal_mesh.HealMesh.read_mesh(mesh_path, node_tol=tol*0.01)
hm.heal_mesh(gamma_tol=tol)
hist, bins, bad_els = hm.quality_statistics(bad_el_tol=tol)
- for name, h in hist.items():
- hm.print_stats(h, bins, name)
- print("# bad els: ", len(bad_els[name]))
+ #for name, h in hist.items():
+ hm.print_stats(hist, bins, "gamma")
+ print("# bad els (gamma): ", len(bad_els))
diff --git a/tests/gmsh/test_gmsh_io.py b/tests/gmsh/test_gmsh_io.py
index 9bddfbf..0fd3def 100644
--- a/tests/gmsh/test_gmsh_io.py
+++ b/tests/gmsh/test_gmsh_io.py
@@ -1,3 +1,4 @@
+import pytest
from bgem.gmsh.gmsh_io import GmshIO
import os
@@ -7,6 +8,8 @@
MESHES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "meshes")
+# Need a fix, never ends
+@pytest.mark.skip
def test_read():
gio = GmshIO(os.path.join(MESHES_DIR, "cube_1x1x1_frac_coarse.msh2"))
assert len(gio.nodes) == 75
diff --git a/tests/pycharm_run/pytest two-scale.run.xml b/tests/pycharm_run/pytest two-scale.run.xml
new file mode 100644
index 0000000..7e390b5
--- /dev/null
+++ b/tests/pycharm_run/pytest two-scale.run.xml
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/tests/stochastic/stub_geometry_3d.py b/tests/stochastic/stub_geometry_3d.py
new file mode 100644
index 0000000..de45bfe
--- /dev/null
+++ b/tests/stochastic/stub_geometry_3d.py
@@ -0,0 +1,727 @@
+"""
+Stub functions toward own 3D fracture geometry.
+"""
+import pytest
+import os
+import attr
+import numpy as np
+import collections
+# import matplotlib.pyplot as plt
+
+# from bgem
+from bgem.gmsh import gmsh
+from bgem.gmsh import options as gmsh_options
+from bgem.gmsh import field as gmsh_field
+from bgem.stochastic import frac_plane as FP
+from bgem.stochastic import frac_isec as FIC
+from bgem.stochastic import fr_set
+from bgem.stochastic import dfn
+from bgem.bspline import brep_writer as bw
+from bgem import Transform
+from fixtures import sandbox_fname
+#script_dir = os.path.dirname(os.path.realpath(__file__))
+
+
+
+def generate_uniform(statistics, n_frac_limit):
+ # generate fracture set
+ box_size = 100
+ fracture_box = 3 * [box_size]
+ #volume = np.product()
+ pop = dfn.Population.from_cfg(statistics, fracture_box)
+ #pop.initialize()
+ pop = pop.set_range_from_size(sample_size=n_frac_limit)
+ mean_size = pop.mean_size()
+ print("total mean size: ", mean_size)
+ pos_gen = dfn.UniformBoxPosition(fracture_box)
+ fractures = pop.sample(pos_distr=pos_gen, keep_nonempty=True)
+ # fracture.fr_intersect(fractures)
+
+ return fractures
+
+
+# def config_fracture_regions(used_families):
+# for model in ["hm_params", "th_params", "th_params_ref"]:
+# model_dict = config_dict[model]
+# model_dict["fracture_regions"] = list(used_families)
+# model_dict["left_well_fracture_regions"] = [".{}_left_well".format(f) for f in used_families]
+# model_dict["right_well_fracture_regions"] = [".{}_right_well".format(f) for f in used_families]
+
+
+def create_fractures_rectangles(gmsh_geom, fractures, base_shape: 'ObjectSet'):
+ # From given fracture date list 'fractures'.
+ # transform the base_shape to fracture objects
+ # fragment fractures by their intersections
+ # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
+ shapes = []
+ for i, fr in enumerate(fractures):
+ shape = base_shape.copy()
+ print("fr: ", i, "tag: ", shape.dim_tags)
+ shape = shape.scale([fr.rx, fr.ry, 1]) \
+ .rotate(axis=fr.rotation_axis, angle=fr.rotation_angle) \
+ .translate(fr.center) \
+ .set_region(fr.region)
+
+ shapes.append(shape)
+
+ fracture_fragments = gmsh_geom.fragment(*shapes)
+ return fracture_fragments
+
+
+def create_fractures_polygons(gmsh_geom, fractures):
+ # From given fracture date list 'fractures'.
+ # transform the base_shape to fracture objects
+ # fragment fractures by their intersections
+ # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
+ frac_obj = fracture.Fractures(fractures)
+ frac_obj.snap_vertices_and_edges()
+ shapes = []
+ for fr, square in zip(fractures, frac_obj.squares):
+ shape = gmsh_geom.make_polygon(square).set_region(fr.region)
+ shapes.append(shape)
+
+ fracture_fragments = gmsh_geom.fragment(*shapes)
+ return fracture_fragments
+
+
+def make_mesh(geometry_dict, fractures: fr_set.Fracture, mesh_name: str):
+ """
+ Create the GMSH mesh from a list of fractures using the bgem.gmsh interface.
+ """
+ fracture_mesh_step = geometry_dict['fracture_mesh_step']
+ dimensions = geometry_dict["box_dimensions"]
+ well_z0, well_z1 = geometry_dict["well_openning"]
+ well_r = geometry_dict["well_effective_radius"]
+ well_dist = geometry_dict["well_distance"]
+
+ factory = gmsh.GeometryOCC(mesh_name, verbose=True)
+ gopt = gmsh_options.Geometry()
+ gopt.Tolerance = 0.0001
+ gopt.ToleranceBoolean = 0.001
+ # gopt.MatchMeshTolerance = 1e-1
+
+ # Main box
+ box = factory.box(dimensions).set_region("box")
+ side_z = factory.rectangle([dimensions[0], dimensions[1]])
+ side_y = factory.rectangle([dimensions[0], dimensions[2]])
+ side_x = factory.rectangle([dimensions[2], dimensions[1]])
+ sides = dict(
+ side_z0=side_z.copy().translate([0, 0, -dimensions[2] / 2]),
+ side_z1=side_z.copy().translate([0, 0, +dimensions[2] / 2]),
+ side_y0=side_y.copy().translate([0, 0, -dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
+ side_y1=side_y.copy().translate([0, 0, +dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
+ side_x0=side_x.copy().translate([0, 0, -dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2),
+ side_x1=side_x.copy().translate([0, 0, +dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2)
+ )
+ for name, side in sides.items():
+ side.modify_regions(name)
+
+ b_box = box.get_boundary().copy()
+
+ # two vertical cut-off wells, just permeable part
+ left_center = [-well_dist / 2, 0, 0]
+ right_center = [+well_dist / 2, 0, 0]
+ left_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
+ .translate([0, 0, well_z0]).translate(left_center)
+ right_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
+ .translate([0, 0, well_z0]).translate(right_center)
+ b_right_well = right_well.get_boundary()
+ b_left_well = left_well.get_boundary()
+
+ print("n fractures:", len(fractures))
+ fractures = create_fractures_rectangles(factory, fractures, factory.rectangle())
+ # fractures = create_fractures_polygons(factory, fractures)
+ fractures_group = factory.group(*fractures)
+ # fractures_group = fractures_group.remove_small_mass(fracture_mesh_step * fracture_mesh_step / 10)
+
+ # drilled box and its boundary
+ box_drilled = box.cut(left_well, right_well)
+
+ # fractures, fragmented, fractures boundary
+ print("cut fractures by box without wells")
+ fractures_group = fractures_group.intersect(box_drilled.copy())
+ print("fragment fractures")
+ box_fr, fractures_fr = factory.fragment(box_drilled, fractures_group)
+ print("finish geometry")
+ b_box_fr = box_fr.get_boundary()
+ b_left_r = b_box_fr.select_by_intersect(b_left_well).set_region(".left_well")
+ b_right_r = b_box_fr.select_by_intersect(b_right_well).set_region(".right_well")
+
+ box_all = []
+ for name, side_tool in sides.items():
+ isec = b_box_fr.select_by_intersect(side_tool)
+ box_all.append(isec.modify_regions("." + name))
+ box_all.extend([box_fr, b_left_r, b_right_r])
+
+ b_fractures = factory.group(*fractures_fr.get_boundary_per_region())
+ b_fractures_box = b_fractures.select_by_intersect(b_box).modify_regions("{}_box")
+ b_fr_left_well = b_fractures.select_by_intersect(b_left_well).modify_regions("{}_left_well")
+ b_fr_right_well = b_fractures.select_by_intersect(b_right_well).modify_regions("{}_right_well")
+ b_fractures = factory.group(b_fr_left_well, b_fr_right_well, b_fractures_box)
+ mesh_groups = [*box_all, fractures_fr, b_fractures]
+
+ print(fracture_mesh_step)
+ # fractures_fr.set_mesh_step(fracture_mesh_step)
+
+ factory.keep_only(*mesh_groups)
+ factory.remove_duplicate_entities()
+ factory.write_brep()
+
+ min_el_size = fracture_mesh_step / 10
+ fracture_el_size = np.max(dimensions) / 20
+ max_el_size = np.max(dimensions) / 8
+
+ fracture_el_size = gmsh_field.constant(fracture_mesh_step, 10000)
+ frac_el_size_only = gmsh_field.restrict(fracture_el_size, fractures_fr, add_boundary=True)
+ gmsh_field.set_mesh_step_field(frac_el_size_only)
+
+ mesh = gmsh_options.Mesh()
+ # mesh.Algorithm = options.Algorithm2d.MeshAdapt # produce some degenerated 2d elements on fracture boundaries ??
+ # mesh.Algorithm = options.Algorithm2d.Delaunay
+ # mesh.Algorithm = options.Algorithm2d.FrontalDelaunay
+ # mesh.Algorithm3D = options.Algorithm3d.Frontal
+ # mesh.Algorithm3D = options.Algorithm3d.Delaunay
+ mesh.ToleranceInitialDelaunay = 0.01
+ # mesh.ToleranceEdgeLength = fracture_mesh_step / 5
+ mesh.CharacteristicLengthFromPoints = True
+ mesh.CharacteristicLengthFromCurvature = True
+ mesh.CharacteristicLengthExtendFromBoundary = 2
+ mesh.CharacteristicLengthMin = min_el_size
+ mesh.CharacteristicLengthMax = max_el_size
+ mesh.MinimumCirclePoints = 6
+ mesh.MinimumCurvePoints = 2
+
+ # factory.make_mesh(mesh_groups, dim=2)
+ factory.make_mesh(mesh_groups)
+ factory.write_mesh(format=gmsh.MeshFormat.msh2)
+ os.rename(mesh_name + ".msh2", mesh_name + ".msh")
+ factory.show()
+
+
+# def find_fracture_neigh(mesh, fract_regions, n_levels=1):
+# """
+# Find neighboring elements in the bulk rock in the vicinity of the fractures.
+# Creates several levels of neighbors.
+# :param mesh: GmshIO mesh object
+# :param fract_regions: list of physical names of the fracture regions
+# :param n_levels: number of layers of elements from the fractures
+# :return:
+# """
+#
+# # make node -> element map
+# node_els = collections.defaultdict(set)
+# max_ele_id = 0
+# for eid, e in mesh.elements.items():
+# max_ele_id = max(max_ele_id, eid)
+# type, tags, node_ids = e
+# for n in node_ids:
+# node_els[n].add(eid)
+#
+# print("max_ele_id = %d" % max_ele_id)
+#
+# # select ids of fracture regions
+# fr_regs = fract_regions
+# # fr_regs = []
+# # for fr in fract_regions:
+# # rid, dim = mesh.physical['fr']
+# # assert dim == 2
+# # fr_regs.append(rid)
+#
+# # for n in node_els:
+# # if len(node_els[n]) > 1:
+# # print(node_els[n])
+#
+# visited_elements = np.zeros(shape=(max_ele_id+1, 1), dtype=int)
+# fracture_neighbors = []
+#
+# def find_neighbors(mesh, element, level, fracture_neighbors, visited_elements):
+# """
+# Auxiliary function which finds bulk neighbor elements to 'element' and
+# saves them to list 'fracture_neighbors'.
+# 'visited_elements' keeps track of already investigated elements
+# 'level' is number of layer from the fractures in which we search
+# """
+# type, tags, node_ids = element
+# ngh_elements = common_elements(node_ids, mesh, node_els, True)
+# for ngh_eid in ngh_elements:
+# if visited_elements[ngh_eid] > 0:
+# continue
+# ngh_ele = mesh.elements[ngh_eid]
+# ngh_type, ngh_tags, ngh_node_ids = ngh_ele
+# if ngh_type == 4: # if they are bulk elements and not already added
+# visited_elements[ngh_eid] = 1
+# fracture_neighbors.append((ngh_eid, level)) # add them
+#
+# # ele type: 1 - line, 2-triangle, 4-tetrahedron, 15-node
+# # find the first layer of elements neighboring to fractures
+# for eid, e in mesh.elements.items():
+# type, tags, node_ids = e
+# if type == 2: # fracture elements
+# visited_elements[eid] = 1
+# if tags[0] not in fr_regs: # is element in fracture region ?
+# continue
+# find_neighbors(mesh, element=e, level=0, fracture_neighbors=fracture_neighbors,
+# visited_elements=visited_elements)
+#
+# # find next layers of elements from the first layer
+# for i in range(1, n_levels):
+# for eid, lev in fracture_neighbors:
+# if lev < i:
+# e = mesh.elements[eid]
+# find_neighbors(mesh, element=e, level=i, fracture_neighbors=fracture_neighbors,
+# visited_elements=visited_elements)
+#
+# return fracture_neighbors
+#
+#
+# def common_elements(node_ids, mesh, node_els, subset=False, max=1000):
+# """
+# Finds elements common to the given nodes.
+# :param node_ids: Ids of the nodes for which we look for common elements.
+# :param mesh:
+# :param node_els: node -> element map
+# :param subset: if true, it returns all the elements that are adjacent to at least one of the nodes
+# if false, it returns all the elements adjacent to all the nodes
+# :param max:
+# :return:
+# """
+# # Generates active elements common to given nodes.
+# node_sets = [node_els[n] for n in node_ids]
+# if subset:
+# elements = list(set(itertools.chain.from_iterable(node_sets))) # remove duplicities
+# else:
+# elements = set.intersection(*node_sets)
+#
+# if len(elements) > max:
+# print("Too many connected elements:", len(elements), " > ", max)
+# for eid in elements:
+# type, tags, node_ids = mesh.elements[eid]
+# print(" eid: ", eid, node_ids)
+# # return elements
+# return active(mesh, elements)
+#
+#
+# def active(mesh, element_iterable):
+# for eid in element_iterable:
+# if eid in mesh.elements:
+# yield eid
+
+# def test_fracture_neighbors(config_dict):
+# """
+# Function that tests finding fracture neighbors.
+# It outputs mesh data - level per element.
+# :param config_dict:
+# :return:
+# """
+# setup_dir(config_dict, clean=True)
+# mesh_repo = config_dict.get('mesh_repository', None)
+# if mesh_repo:
+# healed_mesh = sample_mesh_repository(mesh_repo)
+# config_fracture_regions(config_dict["fracture_regions"])
+# else:
+# fractures = generate_fractures(config_dict)
+# # plot_fr_orientation(fractures)
+# healed_mesh = prepare_mesh(config_dict, fractures)
+# print("Created mesh: " + os.path.basename(healed_mesh))
+#
+# mesh = gmsh_io.GmshIO(healed_mesh)
+# fracture_neighbors = find_fracture_neigh(mesh, ["fr"], n_levels=3)
+#
+# ele_ids = np.array(list(mesh.elements.keys()), dtype=float)
+# ele_ids_map = dict()
+# for i in range(len(ele_ids)):
+# ele_ids_map[ele_ids[i]] = i
+#
+# data = -1 * np.ones(shape=(len(ele_ids), 1))
+#
+# for eid, lev in fracture_neighbors:
+# data[ele_ids_map[eid]] = lev
+#
+# # Separate base from extension
+# mesh_name, extension = os.path.splitext(healed_mesh)
+# # Initial new name
+# new_mesh_name = os.path.join(os.curdir, mesh_name + "_data" + extension)
+#
+# with open(new_mesh_name, "w") as fout:
+# mesh.write_ascii(fout)
+# mesh.write_element_data(fout, ele_ids, 'data', data)
+
+
+# def test_gmsh_dfn():
+# np.random.seed()
+# fractures = generate_fractures(geometry_dict, fracture_stats)
+# factory, mesh = make_mesh(geometry_dict, fractures, "geothermal_dnf")
+
+
+
+#def resolve_fractures_intersection(ipss):
+
+
+# def test_PowerLawSize():
+# powers = [0.8, 1.6, 2.9, 3, 3.2]
+# cmap = plt.get_cmap('gnuplot')
+# colors = [cmap(i) for i in np.linspace(0, 1, len(powers))]
+#
+# fig = plt.figure(figsize = (16, 9))
+# axes = fig.subplots(1, 2, sharey=True)
+# for i, power in enumerate(powers):
+# diam_range = (0.1, 10)
+# distr = frac.PowerLawSize(power, diam_range, 1000)
+# sizes = distr.sample(volume=1, size=10000)
+# sizes.sort()
+# x = np.geomspace(*diam_range, 30)
+# y = [distr.cdf(xv, diam_range) for xv in x]
+# z = [distr.ppf(yv, diam_range) for yv in y]
+# np.allclose(x, z)
+# axes[0].set_xscale('log')
+# axes[0].plot(x, y, label=str(power), c=colors[i])
+#
+# axes[0].plot(sizes[::100], np.linspace(0, 1, len(sizes))[::100], c=colors[i], marker='+')
+# sample_range = [0.1, 1]
+# x1 = np.geomspace(*sample_range, 200)
+# y1 = [distr.cdf(xv, sample_range) for xv in x1]
+# axes[1].set_xscale('log')
+# axes[1].plot(x1, y1, label=str(power))
+# fig.legend()
+# plt.show()
+
+def make_brep(geometry_dict, fractures: fr_set.Fracture, brep_name: str):
+ """
+ Create the BREP file from a list of fractures using the brep writer interface.
+ """
+ #fracture_mesh_step = geometry_dict['fracture_mesh_step']
+ #dimensions = geometry_dict["box_dimensions"]
+
+ print("n fractures:", len(fractures))
+
+ faces = []
+ for i, fr in enumerate(fractures):
+ #ref_fr_points = np.array([[1.0, 1.0, 0.0], [1.0, -1.0, 0.0], [-1.0, -1.0, 0.0], [-1.0, 1.0, 0.0]]) # polovina
+ ref_fr_points = fr_set.RectangleShape()._points
+ frac_points = fr.transform(ref_fr_points)
+ vtxs = [bw.Vertex(p) for p in frac_points]
+ vtxs.append(vtxs[0])
+ edges = [bw.Edge(a, b) for a, b in zip(vtxs[:-1], vtxs[1:])]
+ face = bw.Face(edges)
+ faces.append(face)
+
+ comp = bw.Compound(faces)
+ loc = Transform([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])
+ with open(brep_name, "w") as f:
+ bw.write_model(f, comp, loc)
+
+
+
+def compute_intersections(fractures: fr_set.Fracture):
+ surface = []
+ fracs = []
+ edges = []
+ n_fr = len(fractures)
+
+ for fracture in fractures:
+ frac_plane = FP.FracPlane(fracture)
+ fracs.append(frac_plane)
+ surface.append(frac_plane.surface)
+
+ p = np.array(surface).argsort()
+ tolerance = 10
+ for i in p:
+ for j in p[i + 1:n_fr]: # may be reduced to relevant adepts
+ frac_isec = FIC.FracIsec(fractures[i],fractures[j])
+ points_A, points_B = frac_isec._get_points(tolerance)
+ possible_colision = FIC.FracIsec.colision_indicator(fractures[i], fractures[j], tolerance)
+
+ if possible_colision or frac_isec.have_colision:
+ print(f"collision: {frac_isec.fracture_A.id}, {frac_isec.fracture_B.id}")
+ assert not possible_colision or frac_isec.have_colision
+
+ if len(points_A) > 0:
+ va1 = bw.Vertex(points_A[0,:])
+ if points_A.shape[0] == 2:
+ va2 = bw.Vertex(points_A[1,:])
+ ea1 = bw.Edge(va1, va2)
+
+ if len(points_B) > 0:
+ vb1 = bw.Vertex(points_B[0, :])
+ if points_B.shape[0] == 2:
+ vb2 = bw.Vertex(points_B[1, :])
+ eb1 = bw.Edge(vb1, vb2)
+
+
+def check_duplicities(fi,fj,coor,vertices,tol):
+
+ duplicity_with = -1
+ duplicity_with = fi._check_duplicity(coor,tol,duplicity_with)
+ duplicity_with = fj._check_duplicity(coor, tol,duplicity_with)
+
+ for fracs in fi.isecs:
+ if duplicity_with == -1:
+ for ids in fracs:
+ if vertices[ids].check_duplicity(coor, tol) == True:
+ duplicity_with = ids
+ break
+
+
+class Fractures:
+ """
+ Stub of the class for fracture network simplification.
+ New approach should be:
+ - 2D meshing by GMSH
+ - Healing with specific processing to deal properties of merged fractures.
+ """
+ # regularization of 2d fractures
+ def __init__(self, fractures, epsilon):
+ self.epsilon = epsilon
+ self.fractures = fractures
+ self.points = []
+ self.lines = []
+ self.pt_boxes = []
+ self.line_boxes = []
+ self.pt_bih = None
+ self.line_bih = None
+ self.fracture_ids = []
+ # Maps line to its fracture.
+
+ self.make_lines()
+ self.make_bihs()
+
+ def make_lines(self):
+ # sort from large to small fractures
+ self.fractures.sort(key=lambda fr:fr.rx, reverse=True)
+ base_line = np.array([[-0.5, 0, 0], [0.5, 0, 0]])
+ for i_fr, fr in enumerate(self.fractures):
+ line = FisherOrientation.rotate(base_line * fr.rx, np.array([0, 0, 1]), fr.shape_angle)
+ line += fr.center
+ i_pt = len(self.points)
+ self.points.append(line[0])
+ self.points.append(line[1])
+ self.lines.append((i_pt, i_pt+1))
+ self.fracture_ids.append(i_fr)
+
+ def get_lines(self, fr_range):
+ lines = {}
+ fr_min, fr_max = fr_range
+ for i, (line, fr) in enumerate(zip(self.lines, self.fractures)):
+ if fr_min <= fr.rx < fr_max:
+ lines[i] = [self.points[p][:2] for p in line]
+ return lines
+
+ def make_bihs(self):
+ import bih
+ shift = np.array([self.epsilon, self.epsilon, 0])
+ for line in self.lines:
+ pt0, pt1 = self.points[line[0]], self.points[line[1]]
+ b0 = [(pt0 - shift).tolist(), (pt0 + shift).tolist()]
+ b1 = [(pt1 - shift).tolist(), (pt1 + shift).tolist()]
+ box_pt0 = bih.AABB(b0)
+ box_pt1 = bih.AABB(b1)
+ line_box = bih.AABB(b0 + b1)
+ self.pt_boxes.extend([box_pt0, box_pt1])
+ self.line_boxes.append(line_box)
+ self.pt_bih = bih.BIH()
+ self.pt_bih.add_boxes(self.pt_boxes)
+ self.line_bih = bih.BIH()
+ self.line_bih.add_boxes(self.line_boxes)
+ self.pt_bih.construct()
+ self.line_bih.construct()
+
+ def find_root(self, i_pt):
+ i = i_pt
+ while self.pt_map[i] != i:
+ i = self.pt_map[i]
+ root = i
+ i = i_pt
+ while self.pt_map[i] != i:
+ j = self.pt_map[i]
+ self.pt_map[i] = root
+ i = j
+ return root
+
+ def snap_to_line(self, pt, pt0, pt1):
+ v = pt1 - pt0
+ v /= np.linalg.norm(v)
+ t = v @ (pt - pt0)
+ if 0 < t < 1:
+ projected = pt0 + t * v
+ if np.linalg.norm(projected - pt) < self.epsilon:
+ return projected
+ return pt
+
+
+
+ def simplify(self):
+ """
+ Kruskal algorithm is somehow used to avoid loops in line createion.
+ :return:
+ """
+ self.pt_map = list(range(len(self.points)))
+ for i_pt, point in enumerate(self.points):
+ pt = point.tolist()
+ for j_pt_box in self.pt_bih.find_point(pt):
+ if i_pt != j_pt_box and j_pt_box == self.pt_map[j_pt_box] and self.pt_boxes[j_pt_box].contains_point(pt):
+ self.pt_map[i_pt] = self.find_root(j_pt_box)
+ break
+ new_lines = []
+ new_fr_ids = []
+ for i_ln, ln in enumerate(self.lines):
+ pt0, pt1 = ln
+ pt0, pt1 = self.find_root(pt0), self.find_root(pt1)
+ if pt0 != pt1:
+ new_lines.append((pt0, pt1))
+ new_fr_ids.append(self.fracture_ids[i_ln])
+ self.lines = new_lines
+ self.fracture_ids = new_fr_ids
+
+ for i_pt, point in enumerate(self.points):
+ if self.pt_map[i_pt] == i_pt:
+ pt = point.tolist()
+ for j_line in self.line_bih.find_point(pt):
+ line = self.lines[j_line]
+ if i_pt != line[0] and i_pt != line[1] and self.line_boxes[j_line].contains_point(pt):
+ pt0, pt1 = self.points[line[0]], self.points[line[1]]
+ self.points[i_pt] = self.snap_to_line(point, pt0, pt1)
+ break
+
+ def line_fragment(self, i_ln, j_ln):
+ """
+ Compute intersection of the two lines and if its position is well in interior
+ of both lines, benote it as the fragmen point for both lines.
+ """
+ pt0i, pt1i = (self.points[ipt] for ipt in self.lines[i_ln])
+ pt0j, pt1j = (self.points[ipt] for ipt in self.lines[j_ln])
+ A = np.stack([pt1i - pt0i, -pt1j + pt0j], axis=1)
+ b = -pt0i + pt0j
+ ti, tj = np.linalg.solve(A, b)
+ if self.epsilon <= ti <= 1 - self.epsilon and self.epsilon <= tj <= 1 - self.epsilon:
+ X = pt0i + ti * (pt1i - pt0i)
+ ix = len(self.points)
+ self.points.append(X)
+ self._fragment_points[i_ln].append((ti, ix))
+ self._fragment_points[j_ln].append((tj, ix))
+
+ def fragment(self):
+ """
+ Fragment fracture lines, update map from new line IDs to original fracture IDs.
+ :return:
+ """
+ new_lines = []
+ new_fracture_ids = []
+ self._fragment_points = [[] for l in self.lines]
+ for i_ln, line in enumerate(self.lines):
+ for j_ln in self.line_bih.find_box(self.line_boxes[i_ln]):
+ if j_ln > i_ln:
+ self.line_fragment(i_ln, j_ln)
+ # i_ln line is complete, we can fragment it
+ last_pt = self.lines[i_ln][0]
+ fr_id = self.fracture_ids[i_ln]
+ for t, ix in sorted(self._fragment_points[i_ln]):
+ new_lines.append(last_pt, ix)
+ new_fracture_ids.append(fr_id)
+ last_pt = ix
+ new_lines.append(last_pt, self.lines[i_ln][1])
+ new_fracture_ids.append(fr_id)
+ self.lines = new_lines
+ self.fracture_ids = new_fracture_ids
+
+ # def unit_square_vtxs():
+ # return np.array([
+ # [-0.5, -0.5, 0],
+ # [0.5, -0.5, 0],
+ # [0.5, 0.5, 0],
+ # [-0.5, 0.5, 0]])
+
+ # def compute_transformed_shapes(self):
+ # n_frac = len(self.fractures)
+ #
+ # unit_square = unit_square_vtxs()
+ # z_axis = np.array([0, 0, 1])
+ # squares = np.tile(unit_square[None, :, :], (n_frac, 1, 1))
+ # center = np.empty((n_frac, 3))
+ # trans_matrix = np.empty((n_frac, 3, 3))
+ # for i, fr in enumerate(self.fractures):
+ # vtxs = squares[i, :, :]
+ # vtxs[:, 1] *= fr.aspect
+ # vtxs[:, :] *= fr.r
+ # vtxs = FisherOrientation.rotate(vtxs, z_axis, fr.shape_angle)
+ # vtxs = FisherOrientation.rotate(vtxs, fr.rotation_axis, fr.rotation_angle)
+ # vtxs += fr.centre
+ # squares[i, :, :] = vtxs
+ #
+ # center[i, :] = fr.centre
+ # u_vec = vtxs[1] - vtxs[0]
+ # u_vec /= (u_vec @ u_vec)
+ # v_vec = vtxs[2] - vtxs[0]
+ # u_vec /= (v_vec @ v_vec)
+ # w_vec = FisherOrientation.rotate(z_axis, fr.rotation_axis, fr.rotation_angle)
+ # trans_matrix[i, :, 0] = u_vec
+ # trans_matrix[i, :, 1] = v_vec
+ # trans_matrix[i, :, 2] = w_vec
+ # self.squares = squares
+ # self.center = center
+ # self.trans_matrix = trans_matrix
+ #
+ # def snap_vertices_and_edges(self):
+ # n_frac = len(self.fractures)
+ # epsilon = 0.05 # relaitve to the fracture
+ # min_unit_fr = np.array([0 - epsilon, 0 - epsilon, 0 - epsilon])
+ # max_unit_fr = np.array([1 + epsilon, 1 + epsilon, 0 + epsilon])
+ # cos_limit = 1 / np.sqrt(1 + (epsilon / 2) ** 2)
+ #
+ # all_points = self.squares.reshape(-1, 3)
+ #
+ # isec_condidates = []
+ # wrong_angle = np.zeros(n_frac)
+ # for i, fr in enumerate(self.fractures):
+ # if wrong_angle[i] > 0:
+ # isec_condidates.append(None)
+ # continue
+ # projected = all_points - self.center[i, :][None, :]
+ # projected = np.reshape(projected @ self.trans_matrix[i, :, :], (-1, 4, 3))
+ #
+ # # get bounding boxes in the loc system
+ # min_projected = np.min(projected, axis=1) # shape (N, 3)
+ # max_projected = np.max(projected, axis=1)
+ # # flag fractures that are out of the box
+ # flag = np.any(np.logical_or(min_projected > max_unit_fr[None, :], max_projected < min_unit_fr[None, :]),
+ # axis=1)
+ # flag[i] = 1 # omit self
+ # candidates = np.nonzero(flag == 0)[0] # indices of fractures close to 'fr'
+ # isec_condidates.append(candidates)
+ # # print("fr: ", i, candidates)
+ # for i_fr in candidates:
+ # if i_fr > i:
+ # cos_angle_of_normals = self.trans_matrix[i, :, 2] @ self.trans_matrix[i_fr, :, 2]
+ # if cos_angle_of_normals > cos_limit:
+ # wrong_angle[i_fr] = 1----
+ # print("wrong_angle: ", i, i_fr)
+ #
+ # # atract vertices
+ # fr = projected[i_fr]
+ # flag = np.any(np.logical_or(fr > max_unit_fr[None, :], fr < min_unit_fr[None, :]), axis=1)
+ # print(np.nonzero(flag == 0))
+
+
+def fr_intersect(fractures):
+ """
+ 1. create fracture shape vertices (rotated, translated) square
+ - create vertices of the unit shape
+ - use FisherOrientation.rotate
+ 2. intersection of a line with plane/square
+ 3. intersection of two squares:
+ - length of the intersection
+ - angle
+ -
+ :param fractures:
+ :return:
+ """
+
+ # project all points to all fractures (getting local coordinates on the fracture system)
+ # fracture system axis:
+ # u_vec = vtxs[1] - vtxs[0]
+ # v_vec = vtxs[2] - vtxs[0]
+ # w_vec ... unit normal
+ # fractures with angle that their max distance in the case of intersection
+ # is not greater the 'epsilon'
+
+
+
diff --git a/tests/stochastic/test_dfn.py b/tests/stochastic/test_dfn.py
index fcc28d6..47718ec 100644
--- a/tests/stochastic/test_dfn.py
+++ b/tests/stochastic/test_dfn.py
@@ -1,6 +1,16 @@
"""
+Test DFN stochastic description.
+TODO:
+- test functionality of individual classes
+- test Familly and Population interface
+
+- test stochastic properties of individual distributions
+- visulalization tests for fracture distribution
+- calculation of estimates of stochastic parameters from the sample(s)
+ (way to inversions)
"""
+
import pytest
import os
import attr
@@ -8,18 +18,12 @@
import collections
# import matplotlib.pyplot as plt
-# from bgem
-from bgem.gmsh import gmsh
-from bgem.gmsh import options as gmsh_options
-from bgem.gmsh import field as gmsh_field
-from bgem.stochastic import frac_plane as FP
-from bgem.stochastic import frac_isec as FIC
-from bgem.stochastic import fracture
-from bgem.bspline import brep_writer as bw
-from bgem import Transform
+from bgem.stochastic import dfn
from fixtures import sandbox_fname
#script_dir = os.path.dirname(os.path.realpath(__file__))
+
+
geometry_dict = {
'box_dimensions': [100, 100, 100],
'center_depth': 5000,
@@ -29,60 +33,21 @@
'well_effective_radius': 10,
'well_openning': [-50, 50]}
-fracture_stats = [
- {'concentration': 17.8,
- 'name': 'NS',
- 'p_32': 0.094,
- 'plunge': 1,
- 'power': 2.5,
- 'r_max': 564,
- 'r_min': 0.038,
- 'trend': 292},
- {'concentration': 14.3,
- 'name': 'NE',
- 'p_32': 0.163,
- 'plunge': 2,
- 'power': 2.7,
- 'r_max': 564,
- 'r_min': 0.038,
- 'trend': 326},
- {'concentration': 12.9,
- 'name': 'NW',
- 'p_32': 0.098,
- 'plunge': 6,
- 'power': 3.1,
- 'r_max': 564,
- 'r_min': 0.038,
- 'trend': 60},
- {'concentration': 14.0,
- 'name': 'EW',
- 'p_32': 0.039,
- 'plunge': 2,
- 'power': 3.1,
- 'r_max': 564,
- 'r_min': 0.038,
- 'trend': 15},
- {'concentration': 15.2,
- 'name': 'HZ',
- 'p_32': 0.141,
- 'plunge': 86,
- 'power': 2.38,
- 'r_max': 564,
- 'r_min': 0.038,
- 'trend': 5}]
-
+from fixtures import fracture_stats
-# TODO:
-# - enforce creation of empty physical groups, or creation of empty regions in the flow input
-# - speedup mechanics
-@attr.s(auto_attribs=True)
-class ValueDescription:
- time: float
- position: str
- quantity: str
- unit: str
+def test_PowerLawSize():
+ #dfn.PowerLawSize.from_mean_area()
+ pass
+def test_UniformBoxPosition():
+ center = [-10, -20, -40]
+ dimensions = [20, 30, 40]
+ pos = dfn.UniformBoxPosition(dimensions, center)
+ assert pos.volume == 24000
+ unit_pos_sample = (pos.sample(1000) - center) / dimensions
+ assert np.all(unit_pos_sample > -0.5)
+ assert np.all(unit_pos_sample < 0.5)
def to_polar(x, y, z):
rho = np.sqrt(x ** 2 + y ** 2)
@@ -96,7 +61,7 @@ def plot_fr_orientation(fractures):
family_dict = collections.defaultdict(list)
for fr in fractures:
x, y, z = \
- fracture.FisherOrientation.rotate(np.array([0, 0, 1]), axis=fr.rotation_axis, angle=fr.rotation_angle)[0]
+ dfn.FisherOrientation.rotate(np.array([0, 0, 1]), axis=fr.rotation_axis, angle=fr.rotation_angle)[0]
family_dict[fr.region].append([
to_polar(z, y, x),
to_polar(z, x, -y),
@@ -124,433 +89,5 @@ def plot_fr_orientation(fractures):
plt.close(fig)
# plt.show()
-
-def generate_uniform(statistics, n_frac_limit):
- # generate fracture set
- box_size = 100
- fracture_box = 3 * [box_size]
- volume = np.product(fracture_box)
- pop = fracture.Population(volume)
- pop.initialize(statistics)
- pop.set_sample_range([1, box_size], sample_size=n_frac_limit)
- print("total mean size: ", pop.mean_size())
- pos_gen = fracture.UniformBoxPosition(fracture_box)
- fractures = pop.sample(pos_distr=pos_gen, keep_nonempty=True)
- # fracture.fr_intersect(fractures)
-
- return fractures
-
-
-# def config_fracture_regions(used_families):
-# for model in ["hm_params", "th_params", "th_params_ref"]:
-# model_dict = config_dict[model]
-# model_dict["fracture_regions"] = list(used_families)
-# model_dict["left_well_fracture_regions"] = [".{}_left_well".format(f) for f in used_families]
-# model_dict["right_well_fracture_regions"] = [".{}_right_well".format(f) for f in used_families]
-
-
-def create_fractures_rectangles(gmsh_geom, fractures, base_shape: 'ObjectSet'):
- # From given fracture date list 'fractures'.
- # transform the base_shape to fracture objects
- # fragment fractures by their intersections
- # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
- shapes = []
- for i, fr in enumerate(fractures):
- shape = base_shape.copy()
- print("fr: ", i, "tag: ", shape.dim_tags)
- shape = shape.scale([fr.rx, fr.ry, 1]) \
- .rotate(axis=fr.rotation_axis, angle=fr.rotation_angle) \
- .translate(fr.center) \
- .set_region(fr.region)
-
- shapes.append(shape)
-
- fracture_fragments = gmsh_geom.fragment(*shapes)
- return fracture_fragments
-
-
-def create_fractures_polygons(gmsh_geom, fractures):
- # From given fracture date list 'fractures'.
- # transform the base_shape to fracture objects
- # fragment fractures by their intersections
- # return dict: fracture.region -> GMSHobject with corresponding fracture fragments
- frac_obj = fracture.Fractures(fractures)
- frac_obj.snap_vertices_and_edges()
- shapes = []
- for fr, square in zip(fractures, frac_obj.squares):
- shape = gmsh_geom.make_polygon(square).set_region(fr.region)
- shapes.append(shape)
-
- fracture_fragments = gmsh_geom.fragment(*shapes)
- return fracture_fragments
-
-
-def make_mesh(geometry_dict, fractures: fracture.Fracture, mesh_name: str):
- """
- Create the GMSH mesh from a list of fractures using the bgem.gmsh interface.
- """
- fracture_mesh_step = geometry_dict['fracture_mesh_step']
- dimensions = geometry_dict["box_dimensions"]
- well_z0, well_z1 = geometry_dict["well_openning"]
- well_r = geometry_dict["well_effective_radius"]
- well_dist = geometry_dict["well_distance"]
-
- factory = gmsh.GeometryOCC(mesh_name, verbose=True)
- gopt = gmsh_options.Geometry()
- gopt.Tolerance = 0.0001
- gopt.ToleranceBoolean = 0.001
- # gopt.MatchMeshTolerance = 1e-1
-
- # Main box
- box = factory.box(dimensions).set_region("box")
- side_z = factory.rectangle([dimensions[0], dimensions[1]])
- side_y = factory.rectangle([dimensions[0], dimensions[2]])
- side_x = factory.rectangle([dimensions[2], dimensions[1]])
- sides = dict(
- side_z0=side_z.copy().translate([0, 0, -dimensions[2] / 2]),
- side_z1=side_z.copy().translate([0, 0, +dimensions[2] / 2]),
- side_y0=side_y.copy().translate([0, 0, -dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
- side_y1=side_y.copy().translate([0, 0, +dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
- side_x0=side_x.copy().translate([0, 0, -dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2),
- side_x1=side_x.copy().translate([0, 0, +dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2)
- )
- for name, side in sides.items():
- side.modify_regions(name)
-
- b_box = box.get_boundary().copy()
-
- # two vertical cut-off wells, just permeable part
- left_center = [-well_dist / 2, 0, 0]
- right_center = [+well_dist / 2, 0, 0]
- left_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
- .translate([0, 0, well_z0]).translate(left_center)
- right_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
- .translate([0, 0, well_z0]).translate(right_center)
- b_right_well = right_well.get_boundary()
- b_left_well = left_well.get_boundary()
-
- print("n fractures:", len(fractures))
- fractures = create_fractures_rectangles(factory, fractures, factory.rectangle())
- # fractures = create_fractures_polygons(factory, fractures)
- fractures_group = factory.group(*fractures)
- # fractures_group = fractures_group.remove_small_mass(fracture_mesh_step * fracture_mesh_step / 10)
-
- # drilled box and its boundary
- box_drilled = box.cut(left_well, right_well)
-
- # fractures, fragmented, fractures boundary
- print("cut fractures by box without wells")
- fractures_group = fractures_group.intersect(box_drilled.copy())
- print("fragment fractures")
- box_fr, fractures_fr = factory.fragment(box_drilled, fractures_group)
- print("finish geometry")
- b_box_fr = box_fr.get_boundary()
- b_left_r = b_box_fr.select_by_intersect(b_left_well).set_region(".left_well")
- b_right_r = b_box_fr.select_by_intersect(b_right_well).set_region(".right_well")
-
- box_all = []
- for name, side_tool in sides.items():
- isec = b_box_fr.select_by_intersect(side_tool)
- box_all.append(isec.modify_regions("." + name))
- box_all.extend([box_fr, b_left_r, b_right_r])
-
- b_fractures = factory.group(*fractures_fr.get_boundary_per_region())
- b_fractures_box = b_fractures.select_by_intersect(b_box).modify_regions("{}_box")
- b_fr_left_well = b_fractures.select_by_intersect(b_left_well).modify_regions("{}_left_well")
- b_fr_right_well = b_fractures.select_by_intersect(b_right_well).modify_regions("{}_right_well")
- b_fractures = factory.group(b_fr_left_well, b_fr_right_well, b_fractures_box)
- mesh_groups = [*box_all, fractures_fr, b_fractures]
-
- print(fracture_mesh_step)
- # fractures_fr.set_mesh_step(fracture_mesh_step)
-
- factory.keep_only(*mesh_groups)
- factory.remove_duplicate_entities()
- factory.write_brep()
-
- min_el_size = fracture_mesh_step / 10
- fracture_el_size = np.max(dimensions) / 20
- max_el_size = np.max(dimensions) / 8
-
- fracture_el_size = gmsh_field.constant(fracture_mesh_step, 10000)
- frac_el_size_only = gmsh_field.restrict(fracture_el_size, fractures_fr, add_boundary=True)
- gmsh_field.set_mesh_step_field(frac_el_size_only)
-
- mesh = gmsh_options.Mesh()
- # mesh.Algorithm = options.Algorithm2d.MeshAdapt # produce some degenerated 2d elements on fracture boundaries ??
- # mesh.Algorithm = options.Algorithm2d.Delaunay
- # mesh.Algorithm = options.Algorithm2d.FrontalDelaunay
- # mesh.Algorithm3D = options.Algorithm3d.Frontal
- # mesh.Algorithm3D = options.Algorithm3d.Delaunay
- mesh.ToleranceInitialDelaunay = 0.01
- # mesh.ToleranceEdgeLength = fracture_mesh_step / 5
- mesh.CharacteristicLengthFromPoints = True
- mesh.CharacteristicLengthFromCurvature = True
- mesh.CharacteristicLengthExtendFromBoundary = 2
- mesh.CharacteristicLengthMin = min_el_size
- mesh.CharacteristicLengthMax = max_el_size
- mesh.MinimumCirclePoints = 6
- mesh.MinimumCurvePoints = 2
-
- # factory.make_mesh(mesh_groups, dim=2)
- factory.make_mesh(mesh_groups)
- factory.write_mesh(format=gmsh.MeshFormat.msh2)
- os.rename(mesh_name + ".msh2", mesh_name + ".msh")
- factory.show()
-
-
-# def find_fracture_neigh(mesh, fract_regions, n_levels=1):
-# """
-# Find neighboring elements in the bulk rock in the vicinity of the fractures.
-# Creates several levels of neighbors.
-# :param mesh: GmshIO mesh object
-# :param fract_regions: list of physical names of the fracture regions
-# :param n_levels: number of layers of elements from the fractures
-# :return:
-# """
-#
-# # make node -> element map
-# node_els = collections.defaultdict(set)
-# max_ele_id = 0
-# for eid, e in mesh.elements.items():
-# max_ele_id = max(max_ele_id, eid)
-# type, tags, node_ids = e
-# for n in node_ids:
-# node_els[n].add(eid)
-#
-# print("max_ele_id = %d" % max_ele_id)
-#
-# # select ids of fracture regions
-# fr_regs = fract_regions
-# # fr_regs = []
-# # for fr in fract_regions:
-# # rid, dim = mesh.physical['fr']
-# # assert dim == 2
-# # fr_regs.append(rid)
-#
-# # for n in node_els:
-# # if len(node_els[n]) > 1:
-# # print(node_els[n])
-#
-# visited_elements = np.zeros(shape=(max_ele_id+1, 1), dtype=int)
-# fracture_neighbors = []
-#
-# def find_neighbors(mesh, element, level, fracture_neighbors, visited_elements):
-# """
-# Auxiliary function which finds bulk neighbor elements to 'element' and
-# saves them to list 'fracture_neighbors'.
-# 'visited_elements' keeps track of already investigated elements
-# 'level' is number of layer from the fractures in which we search
-# """
-# type, tags, node_ids = element
-# ngh_elements = common_elements(node_ids, mesh, node_els, True)
-# for ngh_eid in ngh_elements:
-# if visited_elements[ngh_eid] > 0:
-# continue
-# ngh_ele = mesh.elements[ngh_eid]
-# ngh_type, ngh_tags, ngh_node_ids = ngh_ele
-# if ngh_type == 4: # if they are bulk elements and not already added
-# visited_elements[ngh_eid] = 1
-# fracture_neighbors.append((ngh_eid, level)) # add them
-#
-# # ele type: 1 - line, 2-triangle, 4-tetrahedron, 15-node
-# # find the first layer of elements neighboring to fractures
-# for eid, e in mesh.elements.items():
-# type, tags, node_ids = e
-# if type == 2: # fracture elements
-# visited_elements[eid] = 1
-# if tags[0] not in fr_regs: # is element in fracture region ?
-# continue
-# find_neighbors(mesh, element=e, level=0, fracture_neighbors=fracture_neighbors,
-# visited_elements=visited_elements)
-#
-# # find next layers of elements from the first layer
-# for i in range(1, n_levels):
-# for eid, lev in fracture_neighbors:
-# if lev < i:
-# e = mesh.elements[eid]
-# find_neighbors(mesh, element=e, level=i, fracture_neighbors=fracture_neighbors,
-# visited_elements=visited_elements)
-#
-# return fracture_neighbors
-#
-#
-# def common_elements(node_ids, mesh, node_els, subset=False, max=1000):
-# """
-# Finds elements common to the given nodes.
-# :param node_ids: Ids of the nodes for which we look for common elements.
-# :param mesh:
-# :param node_els: node -> element map
-# :param subset: if true, it returns all the elements that are adjacent to at least one of the nodes
-# if false, it returns all the elements adjacent to all the nodes
-# :param max:
-# :return:
-# """
-# # Generates active elements common to given nodes.
-# node_sets = [node_els[n] for n in node_ids]
-# if subset:
-# elements = list(set(itertools.chain.from_iterable(node_sets))) # remove duplicities
-# else:
-# elements = set.intersection(*node_sets)
-#
-# if len(elements) > max:
-# print("Too many connected elements:", len(elements), " > ", max)
-# for eid in elements:
-# type, tags, node_ids = mesh.elements[eid]
-# print(" eid: ", eid, node_ids)
-# # return elements
-# return active(mesh, elements)
-#
-#
-# def active(mesh, element_iterable):
-# for eid in element_iterable:
-# if eid in mesh.elements:
-# yield eid
-
-# def test_fracture_neighbors(config_dict):
-# """
-# Function that tests finding fracture neighbors.
-# It outputs mesh data - level per element.
-# :param config_dict:
-# :return:
-# """
-# setup_dir(config_dict, clean=True)
-# mesh_repo = config_dict.get('mesh_repository', None)
-# if mesh_repo:
-# healed_mesh = sample_mesh_repository(mesh_repo)
-# config_fracture_regions(config_dict["fracture_regions"])
-# else:
-# fractures = generate_fractures(config_dict)
-# # plot_fr_orientation(fractures)
-# healed_mesh = prepare_mesh(config_dict, fractures)
-# print("Created mesh: " + os.path.basename(healed_mesh))
-#
-# mesh = gmsh_io.GmshIO(healed_mesh)
-# fracture_neighbors = find_fracture_neigh(mesh, ["fr"], n_levels=3)
-#
-# ele_ids = np.array(list(mesh.elements.keys()), dtype=float)
-# ele_ids_map = dict()
-# for i in range(len(ele_ids)):
-# ele_ids_map[ele_ids[i]] = i
-#
-# data = -1 * np.ones(shape=(len(ele_ids), 1))
-#
-# for eid, lev in fracture_neighbors:
-# data[ele_ids_map[eid]] = lev
-#
-# # Separate base from extension
-# mesh_name, extension = os.path.splitext(healed_mesh)
-# # Initial new name
-# new_mesh_name = os.path.join(os.curdir, mesh_name + "_data" + extension)
-#
-# with open(new_mesh_name, "w") as fout:
-# mesh.write_ascii(fout)
-# mesh.write_element_data(fout, ele_ids, 'data', data)
-
-
-# def test_gmsh_dfn():
-# np.random.seed()
-# fractures = generate_fractures(geometry_dict, fracture_stats)
-# factory, mesh = make_mesh(geometry_dict, fractures, "geothermal_dnf")
-
-
-#@pytest.mark.skip
-def test_brep_dfn():
- np.random.seed(123)
- fractures = generate_uniform(fracture_stats, n_frac_limit=50)
- for i, f in enumerate(fractures):
- f.id = i
- make_brep(geometry_dict, fractures, sandbox_fname("test_dfn", "brep"))
-
- ipps = compute_intersections(fractures)
- #resolve_fractures_intersection(ipss)
-
- print('brep_test_done')
-
- # TODO:
- # dfn = dfn.DFN(fractures)
- # dfn_simplified = dfn.simplify()
- # brep = dfn_simplified.make_brep()
-
-
-#def resolve_fractures_intersection(ipss):
-
-
-
-def make_brep(geometry_dict, fractures: fracture.Fracture, brep_name: str):
- """
- Create the BREP file from a list of fractures using the brep writer interface.
- """
- #fracture_mesh_step = geometry_dict['fracture_mesh_step']
- #dimensions = geometry_dict["box_dimensions"]
-
- print("n fractures:", len(fractures))
-
- faces = []
- for i, fr in enumerate(fractures):
- #ref_fr_points = np.array([[1.0, 1.0, 0.0], [1.0, -1.0, 0.0], [-1.0, -1.0, 0.0], [-1.0, 1.0, 0.0]]) # polovina
- ref_fr_points = fracture.SquareShape()._points
- frac_points = fr.transform(ref_fr_points)
- vtxs = [bw.Vertex(p) for p in frac_points]
- vtxs.append(vtxs[0])
- edges = [bw.Edge(a, b) for a, b in zip(vtxs[:-1], vtxs[1:])]
- face = bw.Face(edges)
- faces.append(face)
-
- comp = bw.Compound(faces)
- loc = Transform([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])
- with open(brep_name, "w") as f:
- bw.write_model(f, comp, loc)
-
-
-
-def compute_intersections(fractures: fracture.Fracture):
- surface = []
- fracs = []
- edges = []
- n_fr = len(fractures)
-
- for fracture in fractures:
- frac_plane = FP.FracPlane(fracture)
- fracs.append(frac_plane)
- surface.append(frac_plane.surface)
-
- p = np.array(surface).argsort()
- tolerance = 10
- for i in p:
- for j in p[i + 1:n_fr]: # may be reduced to relevant adepts
- frac_isec = FIC.FracIsec(fractures[i],fractures[j])
- points_A, points_B = frac_isec._get_points(tolerance)
- possible_colision = FIC.FracIsec.colision_indicator(fractures[i], fractures[j], tolerance)
-
- if possible_colision or frac_isec.have_colision:
- print(f"collision: {frac_isec.fracture_A.id}, {frac_isec.fracture_B.id}")
- assert not possible_colision or frac_isec.have_colision
-
- if len(points_A) > 0:
- va1 = bw.Vertex(points_A[0,:])
- if points_A.shape[0] == 2:
- va2 = bw.Vertex(points_A[1,:])
- ea1 = bw.Edge(va1, va2)
-
- if len(points_B) > 0:
- vb1 = bw.Vertex(points_B[0, :])
- if points_B.shape[0] == 2:
- vb2 = bw.Vertex(points_B[1, :])
- eb1 = bw.Edge(vb1, vb2)
-
-
-def check_duplicities(fi,fj,coor,vertices,tol):
-
- duplicity_with = -1
- duplicity_with = fi._check_duplicity(coor,tol,duplicity_with)
- duplicity_with = fj._check_duplicity(coor, tol,duplicity_with)
-
- for fracs in fi.isecs:
- if duplicity_with == -1:
- for ids in fracs:
- if vertices[ids].check_duplicity(coor, tol) == True:
- duplicity_with = ids
- break
-
-
+def test_population():
+ pass
diff --git a/tests/stochastic/test_fr_mesh.py b/tests/stochastic/test_fr_mesh.py
new file mode 100644
index 0000000..f4e592c
--- /dev/null
+++ b/tests/stochastic/test_fr_mesh.py
@@ -0,0 +1,339 @@
+import os
+import numpy as np
+from bgem.stochastic import Population, UniformBoxPosition, geometry_gmsh, geometry_brep_writer, FractureSet
+from bgem.gmsh import gmsh, options as gmsh_options, field as gmsh_field
+from fixtures import sandbox_fname
+from fixtures import fracture_stats
+
+#script_dir = os.path.dirname(os.path.realpath(__file__))
+
+
+def make_mesh(geometry_dict, fractures: FractureSet, mesh_name: str):
+ """
+ Create the GMSH mesh from a list of fractures using the bgem.gmsh interface.
+ """
+ fracture_mesh_step = geometry_dict['fracture_mesh_step']
+ dimensions = geometry_dict["box_dimensions"]
+ well_z0, well_z1 = geometry_dict["well_openning"]
+ well_r = geometry_dict["well_effective_radius"]
+ well_dist = geometry_dict["well_distance"]
+
+ factory = gmsh.GeometryOCC(mesh_name, verbose=True)
+ gopt = gmsh_options.Geometry()
+ gopt.Tolerance = 0.0001
+ gopt.ToleranceBoolean = 0.001
+ # gopt.MatchMeshTolerance = 1e-1
+
+ # Main box
+ box = factory.box(dimensions).set_region("box")
+ side_z = factory.rectangle([dimensions[0], dimensions[1]])
+ side_y = factory.rectangle([dimensions[0], dimensions[2]])
+ side_x = factory.rectangle([dimensions[2], dimensions[1]])
+ sides = dict(
+ side_z0=side_z.copy().translate([0, 0, -dimensions[2] / 2]),
+ side_z1=side_z.copy().translate([0, 0, +dimensions[2] / 2]),
+ side_y0=side_y.copy().translate([0, 0, -dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
+ side_y1=side_y.copy().translate([0, 0, +dimensions[1] / 2]).rotate([-1, 0, 0], np.pi / 2),
+ side_x0=side_x.copy().translate([0, 0, -dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2),
+ side_x1=side_x.copy().translate([0, 0, +dimensions[0] / 2]).rotate([0, 1, 0], np.pi / 2)
+ )
+ for name, side in sides.items():
+ side.modify_regions(name)
+
+ b_box = box.get_boundary().copy()
+
+ # two vertical cut-off wells, just permeable part
+ left_center = [-well_dist / 2, 0, 0]
+ right_center = [+well_dist / 2, 0, 0]
+ left_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
+ .translate([0, 0, well_z0]).translate(left_center)
+ right_well = factory.cylinder(well_r, axis=[0, 0, well_z1 - well_z0]) \
+ .translate([0, 0, well_z0]).translate(right_center)
+ b_right_well = right_well.get_boundary()
+ b_left_well = left_well.get_boundary()
+
+ print("n fractures:", len(fractures))
+ fractures_group, region_map = geometry_gmsh(fractures, factory)
+ # fractures_group = fractures_group.remove_small_mass(fracture_mesh_step * fracture_mesh_step / 10)
+
+ # drilled box and its boundary
+ box_drilled = box.cut(left_well, right_well)
+
+ # fractures, fragmented, fractures boundary
+ print("cut fractures by box without wells")
+ fractures_group = fractures_group.intersect(box_drilled.copy())
+ print("fragment fractures")
+ box_fr, fractures_fr = factory.fragment(box_drilled, fractures_group)
+ print("finish geometry")
+ b_box_fr = box_fr.get_boundary()
+ b_left_r = b_box_fr.select_by_intersect(b_left_well).set_region(".left_well")
+ b_right_r = b_box_fr.select_by_intersect(b_right_well).set_region(".right_well")
+
+ box_all = []
+ for name, side_tool in sides.items():
+ isec = b_box_fr.select_by_intersect(side_tool)
+ box_all.append(isec.modify_regions("." + name))
+ box_all.extend([box_fr, b_left_r, b_right_r])
+
+ b_fractures = factory.group(*fractures_fr.get_boundary_per_region())
+ b_fractures_box = b_fractures.select_by_intersect(b_box).modify_regions("{}_box")
+ b_fr_left_well = b_fractures.select_by_intersect(b_left_well).modify_regions("{}_left_well")
+ b_fr_right_well = b_fractures.select_by_intersect(b_right_well).modify_regions("{}_right_well")
+ b_fractures = factory.group(b_fr_left_well, b_fr_right_well, b_fractures_box)
+ mesh_groups = [*box_all, fractures_fr, b_fractures]
+
+ print(fracture_mesh_step)
+ # fractures_fr.set_mesh_step(fracture_mesh_step)
+
+ factory.keep_only(*mesh_groups)
+ factory.remove_duplicate_entities()
+ factory.write_brep()
+
+ min_el_size = fracture_mesh_step / 10
+ fracture_el_size = np.max(dimensions) / 20
+ max_el_size = np.max(dimensions) / 8
+
+ fractures_fr.mesh_step(fracture_mesh_step)
+
+ mesh = gmsh_options.Mesh()
+ # mesh.Algorithm = options.Algorithm2d.MeshAdapt # produce some degenerated 2d elements on fracture boundaries ??
+ # mesh.Algorithm = options.Algorithm2d.Delaunay
+ # mesh.Algorithm = options.Algorithm2d.FrontalDelaunay
+ # mesh.Algorithm3D = options.Algorithm3d.Frontal
+ # mesh.Algorithm3D = options.Algorithm3d.Delaunay
+ mesh.ToleranceInitialDelaunay = 0.01
+ # mesh.ToleranceEdgeLength = fracture_mesh_step / 5
+ mesh.CharacteristicLengthFromPoints = True
+ mesh.CharacteristicLengthFromCurvature = True
+ mesh.CharacteristicLengthExtendFromBoundary = 2
+ mesh.CharacteristicLengthMin = min_el_size
+ mesh.CharacteristicLengthMax = max_el_size
+ mesh.MinimumCirclePoints = 6
+ mesh.MinimumCurvePoints = 2
+
+ # factory.make_mesh(mesh_groups, dim=2)
+ factory.make_mesh(mesh_groups)
+ factory.write_mesh(format=gmsh.MeshFormat.msh2)
+ os.rename(mesh_name + ".msh2", mesh_name + ".msh")
+ #factory.show()
+
+
+# def find_fracture_neigh(mesh, fract_regions, n_levels=1):
+# """
+# Find neighboring elements in the bulk rock in the vicinity of the fractures.
+# Creates several levels of neighbors.
+# :param mesh: GmshIO mesh object
+# :param fract_regions: list of physical names of the fracture regions
+# :param n_levels: number of layers of elements from the fractures
+# :return:
+# """
+#
+# # make node -> element map
+# node_els = collections.defaultdict(set)
+# max_ele_id = 0
+# for eid, e in mesh.elements.items():
+# max_ele_id = max(max_ele_id, eid)
+# type, tags, node_ids = e
+# for n in node_ids:
+# node_els[n].add(eid)
+#
+# print("max_ele_id = %d" % max_ele_id)
+#
+# # select ids of fracture regions
+# fr_regs = fract_regions
+# # fr_regs = []
+# # for fr in fract_regions:
+# # rid, dim = mesh.physical['fr']
+# # assert dim == 2
+# # fr_regs.append(rid)
+#
+# # for n in node_els:
+# # if len(node_els[n]) > 1:
+# # print(node_els[n])
+#
+# visited_elements = np.zeros(shape=(max_ele_id+1, 1), dtype=int)
+# fracture_neighbors = []
+#
+# def find_neighbors(mesh, element, level, fracture_neighbors, visited_elements):
+# """
+# Auxiliary function which finds bulk neighbor elements to 'element' and
+# saves them to list 'fracture_neighbors'.
+# 'visited_elements' keeps track of already investigated elements
+# 'level' is number of layer from the fractures in which we search
+# """
+# type, tags, node_ids = element
+# ngh_elements = common_elements(node_ids, mesh, node_els, True)
+# for ngh_eid in ngh_elements:
+# if visited_elements[ngh_eid] > 0:
+# continue
+# ngh_ele = mesh.elements[ngh_eid]
+# ngh_type, ngh_tags, ngh_node_ids = ngh_ele
+# if ngh_type == 4: # if they are bulk elements and not already added
+# visited_elements[ngh_eid] = 1
+# fracture_neighbors.append((ngh_eid, level)) # add them
+#
+# # ele type: 1 - line, 2-triangle, 4-tetrahedron, 15-node
+# # find the first layer of elements neighboring to fractures
+# for eid, e in mesh.elements.items():
+# type, tags, node_ids = e
+# if type == 2: # fracture elements
+# visited_elements[eid] = 1
+# if tags[0] not in fr_regs: # is element in fracture region ?
+# continue
+# find_neighbors(mesh, element=e, level=0, fracture_neighbors=fracture_neighbors,
+# visited_elements=visited_elements)
+#
+# # find next layers of elements from the first layer
+# for i in range(1, n_levels):
+# for eid, lev in fracture_neighbors:
+# if lev < i:
+# e = mesh.elements[eid]
+# find_neighbors(mesh, element=e, level=i, fracture_neighbors=fracture_neighbors,
+# visited_elements=visited_elements)
+#
+# return fracture_neighbors
+#
+#
+# def common_elements(node_ids, mesh, node_els, subset=False, max=1000):
+# """
+# Finds elements common to the given nodes.
+# :param node_ids: Ids of the nodes for which we look for common elements.
+# :param mesh:
+# :param node_els: node -> element map
+# :param subset: if true, it returns all the elements that are adjacent to at least one of the nodes
+# if false, it returns all the elements adjacent to all the nodes
+# :param max:
+# :return:
+# """
+# # Generates active elements common to given nodes.
+# node_sets = [node_els[n] for n in node_ids]
+# if subset:
+# elements = list(set(itertools.chain.from_iterable(node_sets))) # remove duplicities
+# else:
+# elements = set.intersection(*node_sets)
+#
+# if len(elements) > max:
+# print("Too many connected elements:", len(elements), " > ", max)
+# for eid in elements:
+# type, tags, node_ids = mesh.elements[eid]
+# print(" eid: ", eid, node_ids)
+# # return elements
+# return active(mesh, elements)
+#
+#
+# def active(mesh, element_iterable):
+# for eid in element_iterable:
+# if eid in mesh.elements:
+# yield eid
+
+# def test_fracture_neighbors(config_dict):
+# """
+# Function that tests finding fracture neighbors.
+# It outputs mesh data - level per element.
+# :param config_dict:
+# :return:
+# """
+# setup_dir(config_dict, clean=True)
+# mesh_repo = config_dict.get('mesh_repository', None)
+# if mesh_repo:
+# healed_mesh = sample_mesh_repository(mesh_repo)
+# config_fracture_regions(config_dict["fracture_regions"])
+# else:
+# fractures = generate_fractures(config_dict)
+# # plot_fr_orientation(fractures)
+# healed_mesh = prepare_mesh(config_dict, fractures)
+# print("Created mesh: " + os.path.basename(healed_mesh))
+#
+# mesh = gmsh_io.GmshIO(healed_mesh)
+# fracture_neighbors = find_fracture_neigh(mesh, ["fr"], n_levels=3)
+#
+# ele_ids = np.array(list(mesh.elements.keys()), dtype=float)
+# ele_ids_map = dict()
+# for i in range(len(ele_ids)):
+# ele_ids_map[ele_ids[i]] = i
+#
+# data = -1 * np.ones(shape=(len(ele_ids), 1))
+#
+# for eid, lev in fracture_neighbors:
+# data[ele_ids_map[eid]] = lev
+#
+# # Separate base from extension
+# mesh_name, extension = os.path.splitext(healed_mesh)
+# # Initial new name
+# new_mesh_name = os.path.join(os.curdir, mesh_name + "_data" + extension)
+#
+# with open(new_mesh_name, "w") as fout:
+# mesh.write_ascii(fout)
+# mesh.write_element_data(fout, ele_ids, 'data', data)
+
+
+def test_gmsh_dfn():
+ np.random.seed(123)
+ n_fr_max = 50
+ geometry_dict = dict(
+ fracture_mesh_step=10,
+ box_dimensions = [30, 30, 30],
+ well_openning = [-10, 10],
+ well_effective_radius = 2,
+ well_distance = 10
+ )
+ fractures = generate_uniform(fracture_stats, n_fr_max)
+ make_mesh(geometry_dict, fractures, "geothermal_dnf")
+
+
+def generate_uniform(statistics, n_frac_limit):
+ # generate fracture set
+ box_size = 100
+ fracture_box = 3 * [box_size]
+ pop = Population.from_cfg(statistics, fracture_box)
+ pop = pop.set_range_from_size(n_frac_limit)
+ print("total mean size: ", pop.mean_size())
+ pos_gen = UniformBoxPosition(fracture_box)
+ fractures = pop.sample(pos_distr=pos_gen, keep_nonempty=True)
+ # fracture.fr_intersect(fractures)
+ return fractures
+
+
+
+#@pytest.mark.skip
+def test_brep_dfn_3d():
+ """
+ Test 3D dfn using GMSH meshing.
+ :return:
+ """
+ np.random.seed(123)
+ fractures = generate_uniform(fracture_stats, n_frac_limit=50)
+ #fractures = [f for f in fractures]
+ #for i, f in enumerate(fractures):
+ # f.id = i
+ brep_file = geometry_brep_writer(fractures, sandbox_fname("test_dfn", "brep"))
+
+ # ipps = compute_intersections(fractures)
+ #resolve_fractures_intersection(ipss)
+
+ print('brep_test_done')
+
+ # TODO:
+ # dfn = dfn.DFN(fractures)
+ # dfn_simplified = dfn.simplify()
+ # brep = dfn_simplified.make_brep()
+
+
+
+
+# def test_brep_dfn_3d_brep_writer():
+# np.random.seed(123)
+# fractures = generate_uniform(fracture_stats, n_frac_limit=50)
+# ipps = compute_intersections(fractures)
+# resolve_fractures_intersection(ipss)
+#
+# print('brep_test_done')
+#
+# # TODO:
+# dfn = dfn.DFN(fractures)
+# dfn_simplified = dfn.simplify()
+# brep = dfn_simplified.make_brep()
+
+
+
+
diff --git a/tests/stochastic/test_fr_set.py b/tests/stochastic/test_fr_set.py
new file mode 100644
index 0000000..0d91f44
--- /dev/null
+++ b/tests/stochastic/test_fr_set.py
@@ -0,0 +1,209 @@
+import numpy as np
+import pytest
+from pathlib import Path
+from bgem import stochastic
+import matplotlib.pyplot as plt
+import matplotlib.colors as mcolors
+import numpy as np
+import fixtures
+script_dir = Path(__file__).absolute().parent
+workdir = script_dir / "sandbox"
+
+"""
+Test base shapes.
+"""
+def test_ellipse_shape():
+ shape = stochastic.EllipseShape
+
+
+
+def plot_aabb(aabb, points, inside):
+ # Prepare the AABB rectangle coordinates
+ aabb_rect = np.array([
+ aabb[ii, (0,1)] for ii in [(0,0), (1, 0), (1, 1), (0, 1), (0,0)]
+ ])
+
+ # Plotting
+ fig, ax = plt.subplots(figsize=(8, 8))
+ colors = np.where(inside, 'red', 'grey')
+ ax.scatter(points[:, 0], points[:, 1],s=1, color=colors, label='Points')
+ ax.plot(aabb_rect[:, 0], aabb_rect[:, 1], color='blue', linestyle='--', label='AABB')
+ ax.set_aspect('equal', 'box')
+ # Labels and Title
+ ax.set_xlabel('X')
+ ax.set_ylabel('Y')
+ ax.set_title('Scatter Plot with AABB')
+ ax.legend()
+
+ # Display the plot
+ ax.grid(True)
+ plt.show()
+
+@pytest.mark.parametrize("base_shape",
+ [stochastic.EllipseShape(), stochastic.RectangleShape(), stochastic.PolygonShape(6), stochastic.PolygonShape(8)]
+)
+def test_base_shapes(base_shape):
+ """
+ Use MC integration to:
+ - confirm the shape has unit area
+ - check it could determine interrior points (but not that this check is correct)
+ - check that the corresponding primitive could be made in GMSH interface
+ - confirm that aabb is correct for that primitive
+ :param shape:
+ :return:
+ """
+
+ # Take AABB of the reference shape
+ aabb = base_shape.aabb
+ assert aabb.shape == (2, 2)
+ assert np.allclose(aabb[0] + aabb[1], 0)
+ safe_aabb = 2 * aabb
+
+ # equivalence of is_point_inside ande are_points_inside
+ N = 1000
+ points = np.random.random((N, 2)) * (safe_aabb[1] - safe_aabb[0]) + safe_aabb[0]
+ inside_single = [base_shape.is_point_inside(*pt) for pt in points]
+ inside_vector = base_shape.are_points_inside(points)
+
+ assert np.all(np.array(inside_single, dtype=bool) == inside_vector)
+ out_of_aabb = np.logical_or.reduce((*(points < aabb[0]).T, *(aabb[1] < points).T))
+ any_out = np.any(inside_vector & out_of_aabb)
+ #if any_out:
+ #plot_aabb(aabb, points, inside_vector)
+ assert not any_out
+
+
+ N = 100000
+ points = np.random.random((N, 2)) * (aabb[1] - aabb[0]) + aabb[0]
+ N_in = sum(base_shape.are_points_inside(points))
+ aabb_area = np.prod(aabb[1] - aabb[0])
+ area_estimate = N_in / N * aabb_area
+ assert abs(area_estimate - 1.0) < 0.01
+
+ # test vertices method
+
+
+
+def check_ortogonal_columns(mat):
+ product = mat.T @ mat
+ product_offdiag = product - np.diag(np.diag(product))
+ assert np.allclose(product_offdiag, np.zeros_like(product))
+
+
+def check_fractures_transform_mat(fr_list):
+ dfn = stochastic.FractureSet.from_list(fr_list)
+ # BREP of the fractures
+ # dfn.make_fractures_brep(workdir / "transformed")
+
+ dfn_base = dfn.transform_mat @ np.eye(3)
+
+ for i, fr in enumerate(fr_list):
+ print(f"fr #{i}")
+ base_vectors = dfn_base[i]
+ assert base_vectors.shape == (3, 3)
+ ref_base_1 = (fr.transform(np.eye(3)) - fr.center).T
+ # Origianl fracture transform with respect to DFN transfom matrix.
+ assert np.allclose(dfn.center[i], fr.center)
+
+ # Test base is perpendicular
+ check_ortogonal_columns(ref_base_1)
+ check_ortogonal_columns(base_vectors)
+
+ assert np.allclose(base_vectors, ref_base_1), f"fr #{i} diff:\n {base_vectors}\n {ref_base_1}\n"
+
+ # Tak a single fracutre from DFN and compare its transfom to the DFN transform.
+ fr_2 = dfn[i]
+ ref_base_2 = (fr_2.transform(np.eye(3)) - fr.center).T
+ assert np.allclose(dfn.center[i], fr_2.center)
+ assert np.allclose(base_vectors, ref_base_2)
+
+ # Check rotation matrix
+ assert np.allclose( dfn.rotation_mat.transpose((0, 2, 1)) @ dfn.rotation_mat, np.eye(3))
+
+ # Check inverse transfrom
+ assert np.allclose(dfn.inv_transform_mat @ dfn.transform_mat, np.eye(3))
+
+
+
+
+
+
+
+
+def test_transform_mat():
+ """
+ Apply transfrom for
+ :return:
+ """
+ # Tests without shape rotation.
+
+ #shape_id = stochastic.EllipseShape.id
+ shape_id = stochastic.RectangleShape.id
+ fr = lambda s, c, n: stochastic.Fracture(shape_id, np.array(s), np.array(c), np.array(n) / np.linalg.norm(n))
+ fractures = [
+ fr([1, 1], [1, 2, 3], [0, 0, 1]),
+ fr([2, 2], [1, 2, 3], [0, 0, 1]),
+ fr([2, 3], [1, 2, 3], [1, 1, 0.2]),
+ fr([2, 3], [1, 2, 3], [-1, -1, 0.2]),
+ fr([2, 3], [1, 2, 3], [0, 0, -1]),
+ fr([2, 3], [0, 0, 0], [0, 1, 0]),
+ fr([2, 3], [0, 0, 0], [0, -1, 0]),
+ fr([2, 3], [0, 0, 0], [1, 0, 0]),
+ fr([2, 3], [0, 0, 0], [-1, 0, 0]),
+ fr([2, 3], [0, 0, 0], [1, 2, 3]),
+ ]
+ check_fractures_transform_mat(fractures)
+
+ fr = lambda s, c, n, ax: stochastic.Fracture(shape_id, np.array(s), np.array(c), np.array(n)/np.linalg.norm(n), np.array(ax)/np.linalg.norm(ax))
+ s = [2, 2] #[2, 3]
+ fractures = [
+ fr(s, [0,0,0], [0, 0, 1], [1, 1]),
+ fr(s, [0,0,0], [0, 0, 1], [-1, 1]),
+ fr(s, [0, 0, 0], [0, 0, 1], [-1, -1]),
+ fr(s, [0, 0, 0], [0, 0, 1], [1, -1]),
+ fr(s, [0, 0, 0], [0, 0, 1], [1, 2]),
+ fr(s, [0, 0, 0], [1, -0.5, -3], [1, 0]),
+
+ fr(s, [1, 2, 3], [0, 1, 1], [1, 2]), # out of order
+ fr(s, [1, 2, 3], [1, -0.5, -3], [1, 2]),
+ fr(s, [1, 2, 3], [1, -0.5, -3], [-1, -1]), # out of order
+
+ fr(s, [1, 2, 3], [0, 0, -1], [1,0]),
+ fr(s, [0, 0, 0], [0, 1, 0], [1,0]),
+ fr(s, [0, 0, 0], [0, -1, 0], [1,0]),
+ fr(s, [0, 0, 0], [1, 0, 0], [1,0]),
+ fr(s, [0, 0, 0], [-1, 0, 0], [1,0]),
+ ]
+ check_fractures_transform_mat(fractures)
+
+ fractures = fixtures.get_dfn_sample()
+ check_fractures_transform_mat(fractures)
+
+ # fracture.fr_intersect(fractures)
+
+ # stochastic.Fracture(shape_id, np.array(s), np.array(c), np.array(n))
+
+
+
+@pytest.mark.parametrize("base_shape",
+ [stochastic.EllipseShape(), stochastic.RectangleShape(), stochastic.PolygonShape(6), stochastic.PolygonShape(8)]
+)
+def test_fracture_set_AABB(base_shape):
+ """
+ Test that the AABB of the fractures is correct.
+ We only test that AABB is an upper bound.
+ The relative volume
+ """
+ fractures = fixtures.get_dfn_sample()
+ base_polygon = base_shape.vertices(256*256)
+ tight=0
+ for i, fr in enumerate(fractures):
+ boundary_points = fractures.transform_mat[i] @ base_polygon[:, :, None] + fr.center
+ min_corner, max_corner = fractures.AABB[i]
+ assert np.all(min_corner[None, :] <= boundary_points)
+ assert np.all(max_corner[None, :] >= boundary_points)
+ l_tight = np.min(boundary_points - min_corner[None, :])
+ u_tight = np.min(max_corner[None, :] - boundary_points)
+ rel_tight = max(l_tight, u_tight) / np.linalg.norm(fractures.radius[i])
+ tight = max(tight, rel_tight)
+ print("Tight:", tight, rel_tight)
\ No newline at end of file
diff --git a/tests/stochastic/test_fracture.py b/tests/stochastic/test_fracture.py
deleted file mode 100644
index 4cd94b5..0000000
--- a/tests/stochastic/test_fracture.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import numpy as np
-import pytest
-from bgem.stochastic import fracture as frac
-import matplotlib.pyplot as plt
-import matplotlib.colors as mcolors
-import numpy as np
-
-def test_PowerLawSize():
- powers = [0.8, 1.6, 2.9, 3, 3.2]
- cmap = plt.get_cmap('gnuplot')
- colors = [cmap(i) for i in np.linspace(0, 1, len(powers))]
-
- fig = plt.figure(figsize = (16, 9))
- axes = fig.subplots(1, 2, sharey=True)
- for i, power in enumerate(powers):
- diam_range = (0.1, 10)
- distr = frac.PowerLawSize(power, diam_range, 1000)
- sizes = distr.sample(volume=1, size=10000)
- sizes.sort()
- x = np.geomspace(*diam_range, 30)
- y = [distr.cdf(xv, diam_range) for xv in x]
- z = [distr.ppf(yv, diam_range) for yv in y]
- np.allclose(x, z)
- axes[0].set_xscale('log')
- axes[0].plot(x, y, label=str(power), c=colors[i])
-
- axes[0].plot(sizes[::100], np.linspace(0, 1, len(sizes))[::100], c=colors[i], marker='+')
- sample_range = [0.1, 1]
- x1 = np.geomspace(*sample_range, 200)
- y1 = [distr.cdf(xv, sample_range) for xv in x1]
- axes[1].set_xscale('log')
- axes[1].plot(x1, y1, label=str(power))
- fig.legend()
- plt.show()
diff --git a/tests/stochastic/test_hausdorf_dfn.py b/tests/stochastic/test_hausdorf_dfn.py
index 941c933..10cb1ef 100644
--- a/tests/stochastic/test_hausdorf_dfn.py
+++ b/tests/stochastic/test_hausdorf_dfn.py
@@ -14,7 +14,7 @@
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import collections as mc
-from bgem.stochastic import fracture
+from bgem.stochastic import dfn
def boxcount(Z, k):
@@ -93,26 +93,29 @@ def plot_dfn(power):
fracture_box = [1, 1, 0]
sample_range = (0.001, 1)
- pop = fracture.Population(fracture_box[0] * fracture_box[1])
power = 2.1
conf_range = [0.001, 1]
p_32 = 100
#p_32 = 0.094
- size = fracture.PowerLawSize.from_mean_area(power-1, conf_range, p_32, power)
- pop.add_family("all",
- orientation=fracture.FisherOrientation(0, 90, 0),
- shape=size,
- shape_angle=fracture.VonMisesOrientation(0, 0)
- )
+ size = dfn.PowerLawSize.from_mean_area(power-1, conf_range, p_32, power)
+ family = dfn.FrFamily(
+ orientation=dfn.FisherOrientation(0, 90, 0),
+ size=size,
+ shape_angle=dfn.VonMisesOrientation(0, 0)
+ )
+ pop = dfn.Population(
+ domain=(fracture_box[0], fracture_box[1], 0),
+ families=[family]
+ )
pop.set_sample_range(sample_range)
- pos_gen = fracture.UniformBoxPosition(fracture_box)
+ pos_gen = dfn.UniformBoxPosition(fracture_box)
print("total mean size: ", pop.mean_size())
fractures = pop.sample(pos_distr=pos_gen, keep_nonempty=True)
print("N frac:", len(fractures))
sizes = []
for fr in fractures:
- t = 0.5 * fr.r * np.array([-fr.normal[0][2], fr.normal[0][1], 0])
+ t = 0.5 * fr.r * np.array([-fr.normal[2], fr.normal[1], 0])
a = scale_cut(0.5 + fr.center - t, s)
b = scale_cut(0.5 + fr.center + t, s)
draw.line((a, b), fill=(0,0,0), width=1)
diff --git a/tests/stochastic/test_positions.py b/tests/stochastic/test_positions.py
index 953af2e..fba35a3 100644
--- a/tests/stochastic/test_positions.py
+++ b/tests/stochastic/test_positions.py
@@ -5,7 +5,7 @@
import math
# from bgem
-from bgem.stochastic.fracture import Fracture, SquareShape
+from bgem.stochastic import Fracture, RectangleShape
from bgem.bspline import brep_writer as bw
from bgem import Transform
@@ -80,18 +80,19 @@ def test_angle():
r1 = 7.0
offset = np.array([5.0, 5.0, 0.0])
center1 = np.array([0.0, 0.0, 0.0])+offset
- normal1 = np.array([[0.0, 0.0, 1.0]])
+ normal1 = np.array([0.0, 0.0, 1.0])
normal1 = normal1 / np.linalg.norm(normal1)
angle1 = 0.0
r2 = 2.0
center2 = np.array([0.0, 0.0, r2/2 + eps])+offset
- normal2 = np.array([[0.0, math.cos(phi), math.sin(phi)]])
+ normal2 = np.array([0.0, math.cos(phi), math.sin(phi)])
normal2 = normal2 / np.linalg.norm(normal2)
angle2 = 0.0
- frac_X1= Fracture(SquareShape,r1, center1 , normal1 ,angle1, 1.0)
- frac_X2= Fracture(SquareShape,r2, center2 , normal2 ,angle2, 1.0)
+ family = 0
+ frac_X1= Fracture(RectangleShape.id, r1, center1 , normal1 ,angle1)
+ frac_X2= Fracture(RectangleShape.id, r2, center2 , normal2 ,angle2)
X1_vert = frac_X1.transform(frac_X1.ref_vertices)
X2_vert = frac_X2.transform(frac_X2.ref_vertices)
@@ -151,18 +152,18 @@ def test_cross():
eps = 0.01
r1 = 5.0
center1 = np.array([0.0, 0.0, 0.0])
- normal1 = np.array([[math.sin(phi), 0.0, math.cos(phi)]])
+ normal1 = np.array([math.sin(phi), 0.0, math.cos(phi)])
normal1 = normal1 / np.linalg.norm(normal1)
angle1 = 0.0
r2 = 8.0
center2 = np.array([r2/2 + r1/2 + eps,0.0 , 0 ])
- normal2 = np.array([[0.0,1.0, 0.0]]) #
+ normal2 = np.array([0.0,1.0, 0.0]) #
normal2 = normal2 / np.linalg.norm(normal2)
angle2 = 0.0
- frac_X1= Fracture(SquareShape,r1, center1 , normal1 ,angle1, 1.0)
- frac_X2= Fracture(SquareShape,r2, center2 , normal2 ,angle2, 1.0)
+ frac_X1= Fracture(RectangleShape.id,r1, center1 , normal1 ,angle1)
+ frac_X2= Fracture(RectangleShape.id,r2, center2 , normal2 ,angle2)
X1_vert = frac_X1.transform(frac_X1.ref_vertices)
X2_vert = frac_X2.transform(frac_X2.ref_vertices)
diff --git a/tests/stochastic/test_transform.py b/tests/stochastic/test_transform.py
index 7646aa8..77484a7 100644
--- a/tests/stochastic/test_transform.py
+++ b/tests/stochastic/test_transform.py
@@ -3,20 +3,23 @@
import math
# from bgem
-from bgem.stochastic import fracture
+from bgem.stochastic import fr_set
from bgem.bspline import brep_writer as bw
from bgem import Transform
from fixtures import sandbox_fname
def test_trans():
"""
- Create the BREP file from a list of fractures using the brep writer interface.
+ Create the BREP file from a list of fr_sets using the brep writer interface.
"""
faces = []
-
- frac_X1= fracture.Fracture(fracture.SquareShape,1.0, np.array([1.0, 5.0, 3.0]), np.array([[1.0, -2.0, 1.0]])/np.linalg.norm(np.array([[1.0, -2.0, 1.0]])),math.pi/4, 1.0)
- frac_X2= fracture.Fracture(fracture.SquareShape,2.0, np.array([1.0, 5.0, 3.0]), np.array([[1.0, 1.0, 2.0]])/np.linalg.norm(np.array([[1.0, 1.0, 2.0]])),math.pi/3, 1.0)
- frac_X3= fracture.Fracture(fracture.SquareShape,5.0, np.array([1.0, 5.0, 3.0]), np.array([[3.0, 2.0, 1.0]])/np.linalg.norm(np.array([[3.0, 2.0, 1.0]])),math.pi/6, 1.0)
+ square_id = fr_set.RectangleShape().id
+ frac_fn = lambda r, normal, shape_angle: fr_set.Fracture(
+ square_id, r, np.array([1.0, 5.0, 3.0]), normal / np.linalg.norm(normal),
+ shape_angle)
+ frac_X1= frac_fn(1.0, [1.0, -2.0, 1.0], math.pi/4)
+ frac_X2= frac_fn(2.0, [1.0, 1.0, 2.0], math.pi/3)
+ frac_X3= frac_fn(5.0, [1.0, 5.0, 3.0], math.pi/6)
X1_vert = frac_X1.transform(frac_X1.ref_vertices)
X2_vert = frac_X2.transform(frac_X2.ref_vertices)
diff --git a/tests/upscale/flow_upscale_templ.yaml b/tests/upscale/flow_upscale_templ.yaml
new file mode 100644
index 0000000..2d28fc9
--- /dev/null
+++ b/tests/upscale/flow_upscale_templ.yaml
@@ -0,0 +1,52 @@
+#################
+# Author: Jan Brezina
+
+flow123d_version: 3.9.0
+problem: !Coupling_Sequential
+ description: |
+ Single scale reference field 3D Darcy flow.
+ Driven by unit pressure gradient.
+ mesh:
+ # Input mesh with 'outer' and 'borehole' regions.
+ mesh_file:
+ flow_equation: !Flow_Darcy_LMH
+ output_specific:
+ nonlinear_solver:
+ linear_solver: !Petsc
+ a_tol: 1e-6
+ r_tol: 1e-3
+ # options:
+ input_fields:
+ - region: .BOUNDARY
+ bc_type: dirichlet
+ bc_piezo_head: !FieldFormula
+ value: " @ [x,y,z]"
+ - region: ALL
+ conductivity: !FieldFE
+ mesh_data_file:
+ field_name: conductivity
+ cross_section: !FieldFE
+ mesh_data_file:
+ field_name: cross_section
+ #- region: outer
+ #conductivity: 1e-14
+ # TODO: heterogeneous layerd and possibly anisotropic conductivity here
+ # given as FieldFormula or rather FieldFE computed for particular position.
+ n_schurs: 2
+ output:
+ fields:
+ - piezo_head_p0
+ #- pressure_p0
+ #- pressure_p1
+ - cross_section
+ - velocity_p0
+ - region_id
+ - conductivity
+ balance: {}
+ output_stream:
+ #format: !gmsh
+ format: !vtk
+ # variant: ascii
+
+
+
diff --git a/tests/upscale/fractures_conf.yaml b/tests/upscale/fractures_conf.yaml
new file mode 100644
index 0000000..1eafd47
--- /dev/null
+++ b/tests/upscale/fractures_conf.yaml
@@ -0,0 +1,44 @@
+# Fracture network configuration, list of fracture families.
+# This data set taken from SKB report.
+
+- name: NS
+ trend: 292
+ plunge: 1
+ concentration: 17.8
+ power: 2.5
+# r_min: 0.038
+ r_min: 1
+ r_max: 564
+ p_32: 0.094
+- name: NE
+ trend: 326
+ plunge: 2
+ concentration: 14.3
+ power: 2.7
+ r_min: 1
+ r_max: 564
+ p_32: 0.163
+- name: NW
+ trend: 60
+ plunge: 6
+ concentration: 12.9
+ power: 3.1
+ r_min: 1
+ r_max: 564
+ p_32: 0.098
+- name: EW
+ trend: 15
+ plunge: 2
+ concentration: 14.0
+ power: 3.1
+ r_min: 1
+ r_max: 564
+ p_32: 0.039
+- name: HZ
+ trend: 5
+ plunge: 86
+ concentration: 15.2
+ power: 2.38
+ r_min: 1
+ r_max: 564
+ p_32: 0.141
diff --git a/tests/upscale/mesh_class.py b/tests/upscale/mesh_class.py
new file mode 100644
index 0000000..c2a08ec
--- /dev/null
+++ b/tests/upscale/mesh_class.py
@@ -0,0 +1,342 @@
+from typing import Dict, Tuple, List
+
+import attrs
+import bih
+import numpy as np
+# from numba import njit
+import bisect
+
+from bgem.gmsh.gmsh_io import GmshIO
+from bgem.gmsh import heal_mesh
+
+from bgem.core import File, memoize, report
+
+
+#@njit
+def element_vertices(all_nodes: np.array, node_indices: np.array):
+ return all_nodes[node_indices[:], :]
+
+
+#@njit
+def element_loc_mat(all_nodes: np.array, node_indices: List[int]):
+ n = element_vertices(all_nodes, node_indices)
+ return (n[1:, :] - n[0]).T
+
+
+#@njit
+def element_compute_volume(all_nodes: np.array, node_indices: List[int]):
+ return np.linalg.det(element_loc_mat(all_nodes, node_indices)) / 6
+
+
+@attrs.define
+class Element:
+ mesh: 'Mesh'
+ type: int
+ tags: Tuple[int, int]
+ node_indices: List[int]
+
+ def vertices(self):
+ return element_vertices(self.mesh.nodes, np.array(self.node_indices, dtype=int))
+
+ def loc_mat(self):
+ return element_loc_mat(self.mesh.nodes, self.node_indices)
+
+ def volume(self):
+ return element_compute_volume(self.mesh.nodes, self.node_indices)
+
+ def barycenter(self):
+ return np.mean(self.vertices(), axis=0)
+
+ def gmsh_tuple(self, node_map):
+ node_ids = [node_map[inode] for inode in self.node_indices]
+ return (self.type, self.tags, node_ids)
+
+
+
+
+#@memoize
+def _load_mesh(mesh_file: 'File', heal_tol = None):
+
+ # mesh_file = mesh_file.path
+ if heal_tol is None:
+ gmsh_io = GmshIO(str(mesh_file))
+ return Mesh(gmsh_io, file = mesh_file)
+ else:
+ hm = heal_mesh.HealMesh.read_mesh(str(mesh_file), node_tol= heal_tol * 0.8 )
+ report(hm.heal_mesh)(gamma_tol=heal_tol)
+ #hm.move_all(geom_dict["shift_vec"])
+ #elm_to_orig_reg = hm.map_regions(new_reg_map)
+ report(hm.stats_to_yaml)(mesh_file.with_suffix(".heal_stats.yaml"))
+ #assert hm.healed_mesh_name == mesh_healed
+ hm.write()
+ return Mesh.load_mesh(hm.healed_mesh_name, None)
+
+ # !! can not memoize static and class methods (have no name)
+
+
+#@report
+#@njit
+def mesh_compute_el_volumes(nodes:np.array, node_indices :np.array) -> np.array:
+ return np.array([element_compute_volume(nodes, ni) for ni in node_indices])
+
+
+class Mesh:
+
+ @staticmethod
+ def load_mesh(mesh_file: 'File', heal_tol=None) -> 'Mesh':
+ return _load_mesh(mesh_file, heal_tol)
+
+ @staticmethod
+ def empty(mesh_path) -> 'Mesh':
+ return Mesh(GmshIO(), mesh_path)
+
+ def __init__(self, gmsh_io: GmshIO, file):
+
+ self.gmsh_io : GmshIO = gmsh_io
+ # TODO: remove relation to file
+ # rather use a sort of generic wrapper around loadable objects
+ # in order to relay on the underlaing files for the caching
+ self.file : 'File' = file
+ self.reinit()
+
+
+ def reinit(self):
+ # bounding interval hierarchy for the mesh elements
+ # numbers elements from 0 as they are added
+ self._update_nodes()
+ self._update_elements()
+ self._update_regions()
+
+ # _boxes: List[bih.AABB]
+ self._bih: bih.BIH = None
+
+ self._el_volumes:np.array = None
+ self._el_barycenters:np.array = None
+
+ def _update_nodes(self):
+ self.node_ids = []
+ self.node_indices = {}
+ self.nodes = np.empty((len(self.gmsh_io.nodes), 3))
+ for i, (nid, node) in enumerate(self.gmsh_io.nodes.items()):
+ self.node_indices[nid] = i
+ self.node_ids.append(nid)
+ self.nodes[i, :] = node
+
+ def _update_elements(self):
+ self.el_ids = []
+ self.el_indices = {}
+ self.elements = []
+ for i, (eid, el) in enumerate(self.gmsh_io.elements.items()):
+ type, tags, node_ids = el
+ element = Element(self, type, tags, [self.node_indices[nid] for nid in node_ids])
+ self.el_indices[eid] = i
+ self.el_ids.append(eid)
+ self.elements.append(element)
+
+ def _update_regions(self):
+ self.regions = {dim_tag: name for name, dim_tag in self.gmsh_io.physical.items()}
+
+ def __getstate__(self):
+ return (self.gmsh_io, self.file)
+
+ def __setstate__(self, args):
+ self.gmsh_io, self.file = args
+ self.reinit()
+
+ @property
+ def bih(self):
+ if self._bih is None:
+ self._bih = self._build_bih()
+ return self._bih
+
+ def _build_bih(self):
+ el_boxes = []
+ for el in self.elements:
+ node_coords = el.vertices()
+ box = bih.AABB(node_coords)
+ el_boxes.append(box)
+ _bih = bih.BIH()
+ _bih.add_boxes(el_boxes)
+ _bih.construct()
+ return _bih
+
+
+
+ def candidate_indices(self, box):
+ list_box = box.tolist()
+ return self.bih.find_box(bih.AABB(list_box))
+
+ # def el_volume(self, id):
+ # return self.elements[self.el_indices[id]].volume()
+
+ @property
+ #@report
+ def el_volumes(self):
+ if self._el_volumes is None:
+ node_indices = np.array([e.node_indices for e in self.elements], dtype=int)
+ print(f"Compute el volumes: {self.nodes.shape}, {node_indices.shape}")
+ self._el_volumes = mesh_compute_el_volumes(self.nodes, node_indices)
+ return self._el_volumes
+
+
+
+ def el_barycenters(self):
+ if self._el_barycenters is None:
+ self._el_barycenters = np.array([e.barycenter() for e in self.elements])
+ return self._el_barycenters
+
+
+
+ def fr_map(self, dfn:'FractureSet', reg_id_to_fr:Dict[str, int]):
+ """
+ Get int field with fracture idx for fracture elements and len(dfn) for other
+ :param dfn:
+ :param reg_id_to_fr:
+ :return:
+ TODO: better way to map elements to frature numbers
+ Limiting factors:
+ - we have no control over actual region IDs produced by meshing so we must use
+ physical names to encode fracture numbers
+ - alternative could be based on functional approach that would allow mapping shape IDs back to shapes
+ and then shapes (including fractures), may be asiciated with various attributes
+ - we currently we also depend on gmsh_io.regions when reading the mesh
+ """
+ # TODO better association mechanism
+ #fr_reg_to_idx = {fr.region.id - 100000 - 2: idx for idx, fr in enumerate(fractures)}
+ def fr_id_from_name(name : str):
+ try:
+ _, fam, fr_id = name.split('_')
+ fr_id = int(fr_id)
+ except ValueError:
+ fr_id = len(dfn)
+ return fr_id
+ el_type = [31, 1, 2, 4] # gmsh element types
+ reg_id_to_fr = { (el_type[dim], reg_id): fr_id_from_name(name) for (reg_id, dim), name in self.regions.items()}
+ fr_map = [reg_id_to_fr.get((e.type, e.tags[0]), len(dfn)) for e in self.elements]
+ return np.array(fr_map)
+
+ # def el_loc_mat(self, id):
+ # return self.elements[self.el_indices[id]].loc_mat()
+
+ # def el_barycenter(self, id):
+ # return self.elements[self.el_indices[id]].barycenter()
+
+ # def el_nodes(self, id):
+ # return self.elements[self.el_indices[id]].vertices()
+
+ def submesh(self, elements, file_path):
+ gmesh = GmshIO()
+ active_nodes = np.full( (len(self.nodes),), False)
+ for iel in elements:
+ el = self.elements[iel]
+ active_nodes[el.node_indices] = True
+ sub_nodes = self.nodes[active_nodes]
+ new_for_old_nodes = np.zeros((len(self.nodes),), dtype=int)
+ new_for_old_nodes[active_nodes] = np.arange(1,len(sub_nodes)+1, dtype=int)
+ gmesh.nodes = {(nidx+1):node for nidx, node in enumerate(sub_nodes)}
+ gmesh.elements = {(eidx+100): self.elements[iel].gmsh_tuple(node_map=new_for_old_nodes) for eidx, iel in enumerate(elements)}
+ #print(gmesh.elements)
+ gmesh.physical = self.gmsh_io.physical
+ #gmesh.write(file_path)
+ gmesh.normalize()
+ return Mesh(gmesh, "")
+
+ # Returns field P0 values of field.
+ # Selects the closest time step lower than 'time'.
+ # TODO: we might do time interpolation
+ def get_p0_values(self, field_name:str, time):
+ field_dict = self.gmsh_io.element_data[field_name]
+
+ # determine maximal index of time step, where times[idx] <= time
+ times = [v.time for v in list(field_dict.values())]
+ last_time_idx = bisect.bisect_right(times, time) - 1
+
+ values = field_dict[last_time_idx].values
+ value_ids = field_dict[last_time_idx].tags
+ value_to_el_idx = [self.el_indices[iv] for iv in value_ids]
+ values_mesh = np.empty_like(values)
+ values_mesh[value_to_el_idx[:]] = values
+ return values_mesh
+
+ def get_static_p0_values(self, field_name:str):
+ field_dict = self.gmsh_io.element_data[field_name]
+ assert len(field_dict) == 1
+ values = field_dict[0].values
+ value_ids = field_dict[0].tags
+ value_to_el_idx = [self.el_indices[iv] for iv in value_ids]
+ values_mesh = np.empty_like(values)
+ values_mesh[value_to_el_idx[:]] = values
+ return values_mesh
+
+ def get_static_p1_values(self, field_name:str):
+ field_dict = self.gmsh_io.node_data[field_name]
+ assert len(field_dict) == 1
+ values = field_dict[0].values
+ value_ids = field_dict[0].tags
+ value_to_node_idx = [self.node_indices[iv] for iv in value_ids]
+ values_mesh = np.empty_like(values)
+ values_mesh[value_to_node_idx[:]] = values
+ return values_mesh
+
+
+ def write_fields(self, file_name:str, fields: Dict[str, np.array]=None) -> 'File':
+ self.gmsh_io.write(file_name, format="msh2")
+ if fields is not None:
+ self.gmsh_io.write_fields(file_name, self.el_ids, fields)
+ return file_name #File(file_name)
+
+
+ def map_regions(self, new_reg_map):
+ """
+ Replace all (reg_id, dim) regions by the new regions.
+ new_reg_map: (reg_id, dim) -> new (reg_id, dim, reg_name)
+ return: el_id -> old_reg_id
+ """
+ #print(self.mesh.physical)
+ #print(new_reg_map)
+
+ new_els = {}
+ el_to_old_reg = {}
+
+ for el_id, el in self.gmsh_io.elements.items():
+ type, tags, nodes = el
+ tags = list(tags)
+ old_reg_id = tags[0]
+ dim = len(nodes) - 1
+ old_id_dim = (old_reg_id, dim)
+ if old_id_dim in new_reg_map:
+ el_to_old_reg[self.el_indices[el_id]] = old_id_dim
+ reg_id, reg_dim, reg_name = new_reg_map[old_id_dim]
+ if reg_dim != dim:
+ Exception(f"Assigning region of wrong dimension: ele dim: {dim} region dim: {reg_dim}")
+ self.gmsh_io.physical[reg_name] = (reg_id, reg_dim)
+ tags[0] = reg_id
+ self.gmsh_io.elements[el_id] = (type, tags, nodes)
+ # remove old regions
+ id_to_reg = {id_dim: k for k, id_dim in self.gmsh_io.physical.items()}
+ for old_id_dim in new_reg_map.keys():
+ if old_id_dim in id_to_reg:
+ del self.gmsh_io.physical[id_to_reg[old_id_dim]]
+
+ el_ids = self.el_ids
+ self._update_elements()
+ assert_idx = np.random.randint(0, len(el_ids), 50)
+ assert all((el_ids[i] == self.el_ids[i] for i in assert_idx))
+ return el_to_old_reg
+
+ def el_dim_slice(self, dim):
+ i_begin = len(self.elements)
+ i_end = i_begin
+ el_type = [21, 1, 2, 4][dim]
+ for iel, el in enumerate(self.elements):
+ if el.type == el_type:
+ i_begin = iel
+ break
+ for iel, el in enumerate(self.elements[i_begin:], start=i_begin):
+ if el.type != el_type:
+ i_end = iel
+ break
+ for iel, el in enumerate(self.elements[i_end:], start=i_end):
+ if el.type == el_type:
+ raise IndexError(f"Elements of dimension {dim} does not form a slice.")
+ return slice(i_begin, i_end, 1)
\ No newline at end of file
diff --git a/tests/upscale/test_fem.py b/tests/upscale/test_fem.py
new file mode 100644
index 0000000..f40921e
--- /dev/null
+++ b/tests/upscale/test_fem.py
@@ -0,0 +1,350 @@
+import numpy as np
+import pytest
+
+#from bgem.stochastic import dfn
+from bgem.upscale import fem, fields, fem_plot
+
+# --------------------------------------------------
+# FE class tests
+# --------------------------------------------------
+
+def basis_1():
+ points = np.array([0.0, 1.0])
+ basis = fem.Q1_1d_basis(points)
+ return basis, points
+
+def basis_2():
+ points = np.array([0.0, 0.5, 1.0])
+ basis = fem.Q1_1d_basis(points)
+ return basis, points
+
+
+
+#@pytest.mark.skip()
+def test_Q1_1D_basis():
+ basis_order_1, points = basis_1()
+ assert basis_order_1.shape == (2, 2)
+ np.allclose(fem.eval_1d(basis_order_1, points), np.eye(2,2))
+ print("Q1 order 1 basis: \n", basis_order_1)
+
+ basis_order_2, points = basis_2()
+ assert basis_order_2.shape == (3, 3)
+ np.allclose(fem.eval_1d(basis_order_2, points), np.eye(3, 3))
+
+ print("Q1 order 2 basis: \n", basis_order_2)
+
+
+
+#@pytest.mark.skip()
+def test_poly_diff_1d():
+ diff_order_1 = fem.poly_diff_1d(basis_1()[0])
+ assert diff_order_1.shape == (2, 1)
+ print("Q1 order 1 diff basis: \n", diff_order_1)
+ diff_order_2 = fem.poly_diff_1d(basis_2()[0])
+ assert diff_order_2.shape == (3, 2)
+ print("Q1 order 2 diff basis: \n", diff_order_2)
+
+#@pytest.mark.skip()
+def test_eval_1d():
+ basis_order_1, _ = basis_1()
+ points = [0.2, 0.7]
+ values = [[0.2, 0.7], [0.8, 0.3]]
+ np.allclose(fem.eval_1d(basis_order_1, points), values)
+
+#@pytest.mark.skip()
+def test_Fe_Q1():
+ for dim in range(1, 4):
+ order = 1
+ f = fem.Fe.Q(dim, order)
+ points_1d = np.linspace(0, 1, 2*order + 1)
+ points = np.stack([
+ points_1d,
+ *(dim - 1) * [np.zeros_like(points_1d)]
+ ])
+ basis = f.eval(points)
+ assert basis.shape == ((order + 1)**dim, len(points_1d))
+ grad = f.grad_eval(points)
+ assert grad.shape == (dim, (order + 1)**dim, len(points_1d))
+
+#@pytest.mark.skip()
+def test_flatten_dim():
+ x = np.outer([1, 2, 3, 4, 5, 6, 7, 8], [10, 100, 1000])
+ tensor_x = fem.tensor_dim(x, 3, 2)
+ assert tensor_x.shape == (2, 2, 2, 3)
+ #print(tensor_x)
+ flat_x = fem.flat_dim(tensor_x, 3)
+ assert flat_x.shape == x.shape
+ assert np.allclose(flat_x, x)
+
+
+# --------------------------------------------------
+# Grid class tests
+# --------------------------------------------------
+#@pytest.mark.skip()
+def test_grid_init():
+ g = fem.Grid((100, 150, 200), (4, 3, 2), origin=(-4, -5, -6))
+ assert g.dim == 3
+ assert np.allclose(g.origin, [-4, -5, -6])
+ assert np.allclose(g.dimensions, [100, 150, 200])
+ assert np.allclose(g.shape, [4, 3, 2])
+
+ # basic properties
+ assert np.allclose(g.step, [25, 50, 100])
+ assert g.n_elements == 24
+
+
+#@pytest.mark.skip()
+def test_barycenters():
+ origin = [-4, -5, -6]
+ g = fem.Grid((100, 150, 200), (4, 3, 2), origin=origin)
+ xyz_grid = np.meshgrid(*[np.arange(n_els) for n_els in g.shape], indexing='ij')
+ ref_barycenters = (np.stack(xyz_grid, axis=-1).reshape(-1, 3) + 0.5) * g.step + origin
+ assert np.allclose(g.barycenters(), ref_barycenters)
+
+# --------------------------------------------------
+# FEM class tests
+# --------------------------------------------------
+
+#@pytest.mark.skip()
+def test_grid_init():
+ g = fem.Grid((100, 150, 200), (4, 3, 2), origin=(-4, -5, -6))
+ f = fem.FEM(g, fem.Fe.Q(3, 1))
+
+ assert f.n_loc_dofs == 8
+ assert np.allclose(f.dofs_shape, [5, 4, 3])
+ assert f.n_dofs == 60
+ assert np.allclose(f.dof_coord_coef, [12, 3, 1])
+
+ # numberings
+ assert f.n_bc_dofs == 60 - 6
+ ref_natur_map = [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 17, 18, 20, 21, 22, 23,
+ 24, 25, 26, 27, 29, 30, 32, 33, 34, 35,
+ 36, 37, 38, 39, 41, 42, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 16, 19, 28, 31, 40, 43
+ ]
+ assert np.allclose(f.natur_map, ref_natur_map)
+ # gives natural dof index for given calculation dof index
+ # natural numbering comes from flattened (ix, iy, iz) dof coordinates
+ # calculation numbering puts Dirichlet DOFs at the begining
+ nx, ny, nz = f.grid.shape
+ for ix in range(f.grid.shape[0]):
+ for iy in range(f.grid.shape[1]):
+ for iz in range(f.grid.shape[2]):
+ i_dof_0 = (ix * (ny+1) + iy) * (nz+1) + iz
+
+ natur_el_dofs = i_dof_0 + np.array([0, 1, (nz+1), (nz+1) + 1,
+ (nz+1)*(ny+1), (nz+1)*(ny+1) + 1, (nz+1)*(ny+2), (nz+1)*(ny+2) + 1])
+
+ i_el = (ix * ny + iy) * nz + iz
+ assert np.allclose(f.natur_map[f.el_dofs[i_el]], natur_el_dofs)
+ # shape (n_elements, n_local_dofs), DOF indices in calculation numbering
+
+
+# def test_nodes():
+# origin = [-4, -5, -6]
+# g = fem.Grid((100, 150, 200), (4, 3, 2), fem.Fe.Q(3, 1), origin=origin)
+# xyz_grid = np.meshgrid(*[np.arange(n_dofs) for n_dofs in g.dofs_shape], indexing='ij')
+# ref_barycenters = (np.stack(xyz_grid, axis=-1).reshape(-1, 3) + 0.5) * g.step + origin
+# assert np.allclose(g.nodes(), ref_barycenters)
+
+#@pytest.mark.skip()
+def test_bc_all():
+ origin = [-4, -5, -6]
+ g = fem.FEM(fem.Grid((100, 150, 200), (4, 3, 2), origin=origin), fem.Fe.Q(3, 1))
+ ref_bc_coord = np.zeros(())
+ nx,ny,nz = g.dofs_shape
+ dof_grid = np.meshgrid(*[np.arange(n_dofs) for n_dofs in g.dofs_shape], indexing='ij')
+ dof_coords = np.stack(dof_grid, axis=-1).reshape(-1, 3)
+ ref_bc_coord = [dof_coords[g.natur_map[i_dof]] for i_dof in range(g.n_bc_dofs)]
+ assert np.all(g.bc_coords == ref_bc_coord)
+ assert np.allclose(g.bc_points[0], origin)
+ max_corner = g.grid.origin + g.grid.dimensions
+ bc_points = g.bc_points
+ assert np.allclose(bc_points[g.dofs_shape[2]-1], [origin[0], origin[1], max_corner[2]])
+ assert np.allclose(bc_points[g.dofs_shape[1]*g.dofs_shape[2]-1], [origin[0], max_corner[1], max_corner[2]])
+ assert np.allclose(bc_points[-1], max_corner)
+
+def grid_numbering_Q1(dim):
+ order = 1
+ g = fem.FEM(fem.Grid((100.0, 100.0, 100.0), 4), fem.Fe.Q(dim, order))
+ idx_to_coord = g.dof_coord_coef * g.step[None, :]
+ ref_barycenters = np.arange(g.grid.n_elements)
+ np.allclose(g.grid.barycenters(), ref_barycenters)
+
+
+
+#
+# def test_grid_numbering():
+# # Test Grid numbering
+# for dim in [1, 2, 3]:
+# grid_numbering_Q1(dim)
+# dim = 1
+# order = 2
+# g = fem.Grid(100.0, 4, fem.Fe.Q(dim, order))
+# print(g)
+#
+# dim = 2
+# order = 1
+# g = fem.Grid(100.0, 4, fem.Fe.Q(dim, order))
+# print(g)
+#
+# dim = 3
+# order = 1
+# g = fem.Grid(100.0, 3, fem.Fe.Q(dim, order))
+# print(g)
+#
+# def test_grid_nodes():
+
+#@pytest.mark.skip()
+def test_grid_bc():
+ g = fem.FEM(fem.Grid((10,), 2), fem.Fe.Q(1, 1))
+ assert np.all(g.bc_coords == np.array([[0], [2]]))
+ assert np.allclose(g.bc_points, np.array([[0], [10]]))
+
+ g = fem.FEM(fem.Grid((10, 10), 2), fem.Fe.Q(2, 1))
+ ref = np.array([[0, 0, 0, 1, 1, 2, 2, 2], [0, 1, 2, 0, 2, 0, 1, 2]]).T
+ assert np.all(g.bc_coords == ref)
+
+#@pytest.mark.skip()
+def test_laplace():
+ order = 1
+ N = 3
+ dim = 2
+ g = fem.FEM(fem.Grid(dim*[N], N), fem.Fe.Q(dim, order))
+ l = g.laplace.reshape((-1, g.fe.n_dofs, g.fe.n_dofs))
+ print("\nlaplace, 2d:\n", l)
+
+#@pytest.mark.skip()
+def test_grid_assembly():
+ for dim in range(1, 4):
+ order = 1
+ N = 3
+ g = fem.FEM(fem.Grid(dim*[30], N), fem.Fe.Q(dim, order))
+ K_const = np.diag(np.arange(1, dim + 1))
+ K_const = fem.tn_to_voigt(K_const[None, :, :])
+ K_field = K_const * np.ones(g.grid.n_elements)[:, None]
+ A = g.assembly_dense(K_field)
+ n_dofs = (N+1)**dim
+ assert A.shape == (n_dofs, n_dofs)
+
+#@pytest.mark.skip
+def test_solve_system():
+ for dim in range(1, 4):
+ order = 1
+ N = 3
+ g = fem.FEM(fem.Grid(dim * [30], N), fem.Fe.Q(dim, order))
+ K_const = np.diag(np.arange(1, dim + 1))
+ K_const = fem.tn_to_voigt(K_const[None, :, :])
+ K_field = K_const * np.ones(g.grid.n_elements)[:, None]
+ p_grads = np.eye(dim)
+ ref_pressure = ((g.nodes() - g.grid.origin) @ p_grads).T
+
+ # solve direct
+ pressure = g.solve_direct(K_field, p_grads)
+ assert pressure.shape == (dim, (N + 1) ** dim)
+ assert not np.any(np.isnan(pressure))
+ assert np.allclose(pressure, ref_pressure)
+
+ # solve sparse
+ pressure = g.solve_sparse(K_field, p_grads)
+ assert pressure.shape == (dim, (N + 1) ** dim)
+ assert not np.any(np.isnan(pressure))
+ assert np.allclose(pressure, ref_pressure)
+
+
+#def test_solve_sparse():
+
+
+
+
+#@pytest.mark.skip
+def test_solve_2d():
+ dim = 2
+ order = 1
+ N = 30
+ g = fem.FEM(fem.Grid(dim * [100], (20, 8)), fem.Fe.Q(dim, order))
+ x = g.grid.barycenters()[:, 0]
+ K_const = np.diag([1, 1])
+ #K_const = np.ones((dim, dim))
+ K_const = fields.tn_to_voigt(K_const[None, :, :])
+ K_field = K_const * x[:, None]
+ #K_field = K_const.T * np.ones_like(x)[:, None]
+ p_grads = np.eye(dim)
+ pressure = g.solve_direct(K_field, p_grads)
+ fem_plot.plot_pressure_fields(g, pressure)
+
+@pytest.mark.skip
+def test_solve_sparse_2d():
+ dim = 2
+ order = 1
+ domain_size = 100
+ #N = 30
+ g = fem.FEM(fem.Grid(dim * [domain_size], (20, 8), origin=-domain_size / 2), fem.Fe.Q(dim, order))
+ x = (g.grid.barycenters() - g.grid.origin)[:, 0]
+ K_const = np.diag([1, 1])
+ #K_const = np.ones((dim, dim))
+ K_const = fields.tn_to_voigt(K_const[None, :, :])
+ K_field = K_const * x[:, None]
+ #K_field = K_const.T * np.ones_like(x)[:, None]
+ p_grads = np.eye(dim)
+ pressure = g.solve_sparse(K_field, p_grads)
+
+ xy_grid = [np.linspace(0, g.grid.dimensions[i], g.dofs_shape[i]) for i in range(2)]
+ fem_plot.plot_pressure_fields(*xy_grid, pressure)
+
+#@pytest.mark.skip
+def test_solve_sparse_3d():
+ dim = 3
+ order = 1
+ domain_size = 100
+ #N = 8
+ steps = (9, 10, 11)
+ g = fem.FEM(fem.Grid(dim*[domain_size], steps), fem.Fe.Q(dim=3))
+ #x = g.barycenters()[:, 0]
+ #K_const = np.diag([1, 1, 1])
+ #K_const = np.ones((dim, dim))
+ K_const = np.array([1,1,1,0,0,0]) #fields.tn_to_voigt(K_const[None, :, :])
+ K_field = np.ones(g.grid.n_elements)[:, None] * K_const [None, :]
+ bc_pressure_gradient = [1, 0, 0]
+ bc_pressure_gradient = np.array(bc_pressure_gradient)[None, :]
+
+ #K_field = K_const.T * np.ones_like(x)[:, None]
+ #p_grads = np.eye(dim)
+ pressure = g.solve_sparse(K_field, bc_pressure_gradient)
+ xy_grid = [np.linspace(0, g.grid.dimensions[i], g.dofs_shape[i]) for i in range(2)]
+ #fem_plot.plot_pressure_fields(*xy_grid, pressure)
+ assert not np.any(np.isnan(pressure))
+
+
+@pytest.mark.skip()
+def test_upsacale_2d():
+ K_const = np.diag([10, 100])
+ K_const = fields.tn_to_voigt(K_const[None, :, :])
+ K_field = K_const * np.ones((8, 8))[:, :, None]
+ K_eff = fem.upscale(K_field)
+ assert np.allclose(K_eff, K_const[0, :])
+
+
+# def test_upscale_parallel_plates():
+# cube = [1, 1, 1]
+# for dim in [2, 3]:
+# plates = dfn.FractureSet.parallel_plates(
+# box = cube,
+# normal = [1, 0, 0]
+# )
+
+
+def single_fracture_distance_function():
+ """
+ Determine effective tensor as a function of the voxel center distance from
+ the fracture plane and angle.
+ lattitude : 0 - pi/4 : 9
+ longitude : 0 - pi/4, up to pi/2 for validation : 9
+ distance : 9 levels
+ :return: about 1000 runs, also test of perfromance
+ use 128^3 grid
+ """
+ pass
\ No newline at end of file
diff --git a/tests/upscale/test_two_scale.py b/tests/upscale/test_two_scale.py
new file mode 100644
index 0000000..387e619
--- /dev/null
+++ b/tests/upscale/test_two_scale.py
@@ -0,0 +1,632 @@
+"""
+Test of homogenization techniquest within a two-scale problem.
+- reference solution is evaluated by Flow123d, using direct rasterization of full DFN sample
+ The pressure field is projected to the nodes of the rectangular grid,
+ velocity filed is averaged over rectangular cells.
+
+- two-scale solution involves:
+ 1. homogenization of DFN to the rectangular grid ; general permeability tensor field
+ 2. custom 3d solver for the rectangular grid is used to solve the coarse problem
+
+- various homogenization techniques could be used, homogenization time is evaluated and compared.
+
+TODO:
+- use large fractures in first test in order to have no difference between ellipse and rectangle shapes
+- set parameters to have signifficant difference between fracture and bulk velocities
+- fix ref velocity projection
+- homogenization test
+- implement ellipse shape meshing for flow
+- implement rasterization (decovalex based) for rectangles and polygons
+"""
+
+import pytest
+import shutil
+from pathlib import Path
+
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger()
+
+
+import numpy as np
+import pyvista as pv
+
+from bgem import stochastic
+from bgem.gmsh import gmsh, options
+from mesh_class import Mesh
+from bgem.core import call_flow, dotdict, workdir as workdir_mng
+from bgem.upscale import fem_plot, fem, voigt_to_tn, FracturedMedia, decovalex_dfnmap as dmap
+from scipy.interpolate import LinearNDInterpolator
+
+script_dir = Path(__file__).absolute().parent
+workdir = script_dir / "sandbox"
+from joblib import Memory
+memory = Memory(workdir, verbose=0)
+
+
+
+def fracture_random_set(seed, size_range, max_frac = 1e21):
+ rmin, rmax = size_range
+ box_dimensions = (rmax, rmax, rmax)
+ fr_cfg_path = script_dir/"fractures_conf.yaml"
+ #with open() as f:
+ # pop_cfg = yaml.load(f, Loader=yaml.SafeLoader)
+ fr_pop = stochastic.Population.from_cfg(fr_cfg_path, box_dimensions, shape=stochastic.EllipseShape())
+ if fr_pop.mean_size() > max_frac:
+ common_range = fr_pop.common_range_for_sample_size(sample_size=max_frac)
+ fr_pop = fr_pop.set_sample_range(common_range)
+ print(f"fr set range: {[rmin, rmax]}, fr_lim: {max_frac}, mean population size: {fr_pop.mean_size()}")
+ pos_gen = stochastic.UniformBoxPosition(fr_pop.domain)
+ np.random.seed(seed)
+ fractures = fr_pop.sample(pos_distr=pos_gen, keep_nonempty=True)
+ #for fr in fractures:
+ # fr.region = gmsh.Region.get("fr", 2)
+ return fractures
+
+def fracture_fixed_set():
+ rmin, rmax = size_range
+ box_dimensions = (rmax, rmax, rmax)
+ fr_cfg_path = script_dir/"fractures_conf.yaml"
+ #with open() as f:
+ # pop_cfg = yaml.load(f, Loader=yaml.SafeLoader)
+ fr_pop = stochastic.Population.from_cfg(fr_cfg_path, box_dimensions, shape=stochastic.EllipseShape())
+ if fr_pop.mean_size() > max_frac:
+ common_range = fr_pop.common_range_for_sample_size(sample_size=max_frac)
+ fr_pop = fr_pop.set_sample_range(common_range)
+ print(f"fr set range: {[rmin, rmax]}, fr_lim: {max_frac}, mean population size: {fr_pop.mean_size()}")
+ pos_gen = stochastic.UniformBoxPosition(fr_pop.domain)
+ np.random.seed(seed)
+ stochastic.FractureSet.from_list()
+ fractures = fr_pop.sample(pos_distr=pos_gen, keep_nonempty=True)
+ #for fr in fractures:
+ # fr.region = gmsh.Region.get("fr", 2)
+ return fractures
+
+
+
+
+def ref_solution_mesh(work_dir, domain_dimensions, fractures, fr_step, bulk_step):
+ factory = gmsh.GeometryOCC("homo_cube", verbose=True)
+ gopt = options.Geometry()
+ gopt.Tolerance = 0.0001
+ gopt.ToleranceBoolean = 0.001
+ box = factory.box(domain_dimensions)
+
+ # TODO: use shape from fractures
+ fractures, fr_region_map = create_fractures_rectangles(factory, fractures, factory.rectangle())
+ fractures_group = factory.group(*fractures).intersect(box)
+ box_fr, fractures_fr = factory.fragment(box, fractures_group)
+ fractures_fr.mesh_step(fr_step) #.set_region("fractures")
+ objects = [box_fr, fractures_fr]
+ factory.write_brep(str(work_dir / factory.model_name) )
+ #factory.mesh_options.CharacteristicLengthMin = cfg.get("min_mesh_step", cfg.boreholes_mesh_step)
+ factory.mesh_options.CharacteristicLengthMax = bulk_step
+ #factory.mesh_options.Algorithm = options.Algorithm3d.MMG3D
+
+ # mesh.Algorithm = options.Algorithm2d.MeshAdapt # produce some degenerated 2d elements on fracture boundaries ??
+ # mesh.Algorithm = options.Algorithm2d.Delaunay
+ # mesh.Algorithm = options.Algorithm2d.FrontalDelaunay
+
+ factory.mesh_options.Algorithm = options.Algorithm3d.Delaunay
+ #mesh.ToleranceInitialDelaunay = 0.01
+ # mesh.ToleranceEdgeLength = fracture_mesh_step / 5
+ #mesh.CharacteristicLengthFromPoints = True
+ #factory.mesh_options.CharacteristicLengthFromCurvature = False
+ #factory.mesh_options.CharacteristicLengthExtendFromBoundary = 2 # co se stane if 1
+ #mesh.CharacteristicLengthMin = min_el_size
+ #mesh.CharacteristicLengthMax = max_el_size
+
+ #factory.keep_only(*objects)
+ #factory.remove_duplicate_entities()
+ factory.make_mesh(objects, dim=3)
+ #factory.write_mesh(me gmsh.MeshFormat.msh2) # unfortunately GMSH only write in version 2 format for the extension 'msh2'
+ f_name = work_dir / (factory.model_name + ".msh2")
+ factory.write_mesh(str(f_name), format=gmsh.MeshFormat.msh2)
+ return f_name, fr_region_map
+
+def fr_cross_section(fractures, cross_to_r):
+ return [cross_to_r * fr.r for fr in fractures]
+
+
+def fr_field(mesh, dfn, reg_id_to_fr, fr_values, bulk_value):
+ """
+ Provide impoicit fields on fractures as input.
+ :param mesh:
+ :param fractures:
+ :param fr_values:
+ :param bulk_value:
+ :return:
+ """
+ fr_map = mesh.fr_map(dfn, reg_id_to_fr) # np.array of fracture indices of elements, n_frac for nonfracture elements
+ fr_values_ = np.concatenate((
+ np.array(fr_values),
+ np.atleast_1d(bulk_value)))
+ field_vals = fr_values_[fr_map]
+ return field_vals
+
+
+
+
+# def velocity_p0(grid_step, min_corner, max_corner, mesh, values):
+# """
+# Pressure P1 field projection
+# - P0 pressure in barycenters
+# - use interpolation to construct P1 Structured grid
+# Interpolate: discrete points -> field, using RBF placed at points
+# Sample: field -> nodal data, evaluate nodes in source field
+#
+# Velocity P0 projection
+# 1. get array of barycenters and element volumes
+# 2. ijk cell coords of each source point
+# 3. weights = el_volume / np.add.at(cell_vol_sum, ijk, el_volume)[ijk[:]]
+# 4. np.add.at(cell_velocities, ijk, weights * velocities)
+# :return:
+# """
+# pass
+
+@memory.cache
+def reference_solution(fr_media: FracturedMedia, dimensions, bc_gradient):
+ dfn = fr_media.dfn
+ bulk_conductivity = fr_media.conductivity
+
+ workdir = script_dir / "sandbox"
+ workdir.mkdir(parents=True, exist_ok=True)
+
+ # Input crssection and conductivity
+ mesh_file, fr_region_map = ref_solution_mesh(workdir, dimensions, dfn, fr_step=7, bulk_step=7)
+ full_mesh = Mesh.load_mesh(mesh_file, heal_tol = 0.001) # gamma
+ fields = dict(
+ conductivity=fr_field(full_mesh, dfn, fr_region_map, fr_media.fr_conductivity, bulk_conductivity),
+ cross_section=fr_field(full_mesh, dfn, fr_region_map, fr_media.fr_cross_section, 1.0)
+ )
+ cond_file = full_mesh.write_fields(str(workdir / "input_fields.msh2"), fields)
+ cond_file = Path(cond_file)
+ cond_file = cond_file.rename(cond_file.with_suffix(".msh"))
+ # solution
+ flow_cfg = dotdict(
+ flow_executable=[
+ "/home/jb/workspace/flow123d/bin/fterm",
+ "--no-term",
+# - flow123d/endorse_ci:a785dd
+# - flow123d/ci-gnu:4.0.0a_d61969
+ "dbg",
+ "run",
+ "--profiler_path",
+ "profile"
+ ],
+ mesh_file=cond_file,
+ pressure_grad=bc_gradient
+ )
+ f_template = "flow_upscale_templ.yaml"
+ shutil.copy( (script_dir / f_template), workdir)
+ with workdir_mng(workdir):
+ flow_out = call_flow(flow_cfg, f_template, flow_cfg)
+
+ # Project to target grid
+ print(flow_out)
+ #vel_p0 = velocity_p0(target_grid, flow_out)
+ # projection of fields
+ return flow_out
+
+def project_ref_solution_(flow_out, grid: fem.Grid):
+ # Velocity P0 projection
+ # 1. get array of barycenters (source points) and element volumes of the fine mesh
+ # 2. ijk cell coords of each source point
+ # 3. weights = el_volume / np.add.at(cell_vol_sum, ijk, el_volume)[ijk[:]]
+ # 4. np.add.at(cell_velocities, ijk, weights * velocities)
+ # :return:
+ pvd_content = pv.get_reader(flow_out.hydro.spatial_file.path)
+ pvd_content.set_active_time_point(0)
+ dataset = pvd_content.read()[0] # Take first block of the Multiblock dataset
+ cell_centers_coords = dataset.cell_centers().points
+ grid_min_corner = -grid.dimensions / 2
+ centers_ijk_grid = (cell_centers_coords - grid_min_corner) // grid.step[None, :]
+ centers_ijk_grid = centers_ijk_grid.astype(np.int32)
+ assert np.alltrue(centers_ijk_grid < grid.shape[None, :])
+
+ grid_cell_idx = centers_ijk_grid[:, 0] + grid.shape[0] * (centers_ijk_grid[:, 1] + grid.shape[1] * centers_ijk_grid[:, 2])
+ sized = dataset.compute_cell_sizes()
+ cell_volume = np.abs(sized.cell_data["Volume"])
+ grid_sum_cell_volume = np.zeros(grid.n_elements)
+ np.add.at(grid_sum_cell_volume, grid_cell_idx, cell_volume)
+ weights = cell_volume[:] / grid_sum_cell_volume[grid_cell_idx[:]]
+
+ velocities = dataset.cell_data['velocity_p0']
+ grid_velocities = np.zeros((grid.n_elements, 3))
+ wv = weights[:, None] * velocities
+ for ax in [0, 1, 2]:
+ np.add.at(grid_velocities[:, ax], grid_cell_idx, wv[:, ax])
+
+ return grid_velocities.reshape((*grid.shape, 3))
+
+def det33(mat):
+ """
+ mat: (N, 3, 3)
+ :param mat:
+ :return: (N, )
+ """
+ return sum(
+ np.prod(mat[:, [(col, (row+step)%3) for col in range(3)]])
+ for row in [0, 1, 2] for step in [1,2]
+ )
+
+@memory.cache
+def refine_barycenters(element, level):
+ """
+ Produce refinement of given element (triangle or tetrahedra), shape (N, n_vertices, 3)
+ and return barycenters of refined subelements.
+ """
+ return np.mean(refine_element(element, level), axis=1)
+
+@memory.cache
+def project_adaptive_source_quad(flow_out, grid: fem.Grid):
+ grid_cell_volume = np.prod(grid.step)/27
+
+ ref_el_2d = np.array([(0, 0), (1, 0), (0, 1)])
+ ref_el_3d = np.array([(0, 0, 0), (1, 0, 0), (0, 1, 0), (0, 0, 1)])
+
+ pvd_content = pv.get_reader(flow_out.hydro.spatial_file.path)
+ pvd_content.set_active_time_point(0)
+ dataset = pvd_content.read()[0] # Take first block of the Multiblock dataset
+
+ velocities = dataset.cell_data['velocity_p0']
+ cross_section = dataset.cell_data['cross_section']
+
+
+
+ p_dataset = dataset.cell_data_to_point_data()
+ p_dataset.point_data['velocity_magnitude'] = np.linalg.norm(p_dataset.point_data['velocity_p0'], axis=1)
+ plane = pv.Plane(center=(0, 0, 0), direction=(0, 0, 1))
+ cut_dataset = p_dataset.clip_surface(plane)
+
+ plotter = pv.Plotter()
+ plotter.add_mesh(p_dataset, color='white', opacity=0.3, label='Original Dataset')
+ plotter.add_mesh(cut_dataset, scalars='velocity_magnitude', cmap='viridis', label='Velocity Magnitude')
+
+
+ # Add legend and show the plot
+ plotter.add_scalar_bar(title='Velocity Magnitude')
+ plotter.add_legend()
+ plotter.show()
+
+
+ #num_cells = dataset.n_cells
+ #shifts = np.zeros((num_cells, 3))
+ #transform_matrices = np.zeros((num_cells, 3, 3))
+ #volumes = np.zeros(num_cells)
+
+ weights_sum = np.zeros((grid.n_elements,))
+ grid_velocities = np.zeros((grid.n_elements, 3))
+ levels = np.zeros(dataset.n_cells, dtype=np.int32)
+ # Loop through each cell
+ for i in range(dataset.n_cells):
+ cell = dataset.extract_cells(i)
+ points = cell.points
+
+ if len(points) < 3:
+ continue # Skip cells with less than 3 vertices
+
+ # Shift: the first vertex of the cell
+ shift = points[0]
+ #shifts[i] = shift
+
+ transform_matrix = points[1:] - shift
+ if len(points) == 4: # Tetrahedron
+ # For a tetrahedron, we use all three vectors formed from the first vertex
+ #transform_matrices[i] = transform_matrix[:3].T
+ # Volume calculation for a tetrahedron:
+ volume = np.abs(np.linalg.det(transform_matrix[:3])) / 6
+ ref_el = ref_el_3d
+ elif len(points) == 3: # Triangle
+ # For a triangle, we use only two vectors
+ #transform_matrices[i, :2] = transform_matrix.T
+ # Area calculation for a triangle:
+ volume = 0.5 * np.linalg.norm(np.cross(transform_matrix[0], transform_matrix[1])) * cross_section[i]
+ ref_el = ref_el_2d
+ level = max(int(np.log2(volume/grid_cell_volume) / 3.0), 0)
+ levels[i] = level
+ ref_barycenters = refine_barycenters(ref_el[None, :, :],level)
+ barycenters = shift[None, :] + ref_barycenters @ transform_matrix
+ grid_indices = grid.project_points(barycenters)
+ weights_sum[grid_indices] += volume
+ grid_velocities[grid_indices] += volume * velocities[i]
+ print(np.bincount(levels))
+ grid_velocities = grid_velocities / weights_sum[:, None]
+ return grid_velocities
+
+
+# Define transformation matrices and index mappings for 2D and 3D refinements
+_transformation_matrices = {
+ 3: np.array([
+ [1, 0, 0], # Vertex 0
+ [0, 1, 0], # Vertex 1
+ [0, 0, 1], # Vertex 2
+ [0.5, 0.5, 0], # Midpoint between vertices 0 and 1
+ [0, 0.5, 0.5], # Midpoint between vertices 1 and 2
+ [0.5, 0, 0.5], # Midpoint between vertices 0 and 2
+ ]),
+ 4: np.array([
+ [1, 0, 0, 0], # Vertex 0
+ [0, 1, 0, 0], # Vertex 1
+ [0, 0, 1, 0], # Vertex 2
+ [0, 0, 0, 1], # Vertex 3
+ [0.5, 0.5, 0, 0], # Midpoint between vertices 0 and 1
+ [0.5, 0, 0.5, 0], # Midpoint between vertices 0 and 2
+ [0.5, 0, 0, 0.5], # Midpoint between vertices 0 and 3
+ [0, 0.5, 0.5, 0], # Midpoint between vertices 1 and 2
+ [0, 0.5, 0, 0.5], # Midpoint between vertices 1 and 3
+ [0, 0, 0.5, 0.5], # Midpoint between vertices 2 and 3
+ ])
+}
+
+_index_maps = {
+ 3: np.array([
+ [0, 3, 5], # Triangle 1
+ [3, 1, 4], # Triangle 2
+ [3, 4, 5], # Triangle 3
+ [5, 4, 2] # Triangle 4
+ ]),
+ 4: np.array([
+ [0, 4, 5, 6], # Tetrahedron 1
+ [1, 4, 7, 8], # Tetrahedron 2
+ [2, 5, 7, 9], # Tetrahedron 3
+ [3, 6, 8, 9], # Tetrahedron 4
+ [4, 5, 6, 7], # Center tetrahedron 1
+ [4, 7, 8, 6], # Center tetrahedron 2
+ [5, 7, 9, 6], # Center tetrahedron 3
+ [6, 8, 9, 7], # Center tetrahedron 4
+ ])
+}
+
+
+def refine_element(element, level):
+ """
+ Recursively refines an element (triangle or tetrahedron) in space using matrix multiplication.
+
+ :param element: A numpy array of shape (1, N, M), where N is the number of vertices (3 or 4).
+ :param level: Integer, the level of refinement.
+ :return: A numpy array containing the vertices of all refined elements.
+ """
+ if level == 0:
+ return element
+ n_tria, num_vertices, dim = element.shape
+ assert n_tria == 1
+ assert num_vertices == dim + 1
+ transformation_matrix = _transformation_matrices[num_vertices]
+ index_map = _index_maps[num_vertices]
+ # Generate all nodes by applying the transformation matrix to the original vertices
+ nodes = np.dot(transformation_matrix, element[0])
+ # Construct new elements using advanced indexing
+ new_elements = nodes[index_map]
+ # Recursively refine each smaller element
+ result = np.vstack([
+ refine_element(new_elem[None, :, :], level - 1) for new_elem in new_elements
+ ])
+ return result
+
+
+def plot_triangles(triangles):
+ """
+ Plots a series of refined triangles.
+
+ :param triangles: A numpy array of shape (N, 3, 2) containing the vertices of all triangles.
+ """
+ import matplotlib.pyplot as plt
+ import matplotlib.tri as tri
+
+ plt.figure(figsize=(8, 8))
+ ax = plt.gca()
+
+ # Flatten the array for plotting
+ triangles_flat = triangles.reshape(-1, 2)
+ tri_indices = np.arange(len(triangles_flat)).reshape(-1, 3)
+
+ # Create a Triangulation object
+ triangulation = tri.Triangulation(triangles_flat[:, 0], triangles_flat[:, 1], tri_indices)
+
+ # Plot the triangulation
+ ax.triplot(triangulation, 'ko-')
+
+ # Setting the aspect ratio to be equal to ensure the triangle is not distorted
+ ax.set_aspect('equal')
+
+ # Turn off the grid
+ ax.grid(False)
+
+ # Setting the limits to get a better view
+ ax.set_xlim(triangles_flat[:, 0].min() - 0.1, triangles_flat[:, 0].max() + 0.1)
+ ax.set_ylim(triangles_flat[:, 1].min() - 0.1, triangles_flat[:, 1].max() + 0.1)
+
+ # Add a title
+ plt.title('Refined Triangles Visualization')
+ plt.show()
+
+@pytest.mark.skip
+def test_refine_triangle():
+ # Example usage
+ initial_triangle = np.array([[[0, 0], [1, 0], [0.5, np.sqrt(3) / 2]]])
+ L = 2 # Set the desired level of refinement
+
+ # Refine the triangle
+ refined_triangles = refine_element(initial_triangle, L)
+ print(f"Refined Triangles at Level {L}:")
+ print(refined_triangles)
+ print("Total triangles:", len(refined_triangles))
+
+ plot_triangles(refined_triangles)
+
+
+
+def plot_tetrahedra(tetrahedra):
+ """
+ Plots a series of refined tetrahedra in 3D.
+
+ :param tetrahedra: A numpy array of shape (N, 4, 3) containing the vertices of all tetrahedra.
+ """
+ import matplotlib.pyplot as plt
+ from mpl_toolkits.mplot3d.art3d import Poly3DCollection
+
+ fig = plt.figure(figsize=(10, 8))
+ ax = fig.add_subplot(111, projection='3d')
+
+ # Flatten the array for plotting
+ for tet in tetrahedra:
+ vtx = tet.reshape(-1, 3)
+ tri = [[vtx[0], vtx[1], vtx[2]],
+ [vtx[0], vtx[1], vtx[3]],
+ [vtx[0], vtx[2], vtx[3]],
+ [vtx[1], vtx[2], vtx[3]]]
+ for s in tri:
+ poly = Poly3DCollection([s], edgecolor='k', alpha=0.2, facecolor=np.random.rand(3, ))
+ ax.add_collection3d(poly)
+
+ ax.set_xlim(0, 1)
+ ax.set_ylim(0, 1)
+ ax.set_zlim(0, 1)
+ plt.title('Refined Tetrahedra Visualization')
+ plt.show()
+
+@pytest.mark.skip
+def test_refine_tetra():
+ initial_tetrahedron = np.array(
+ [[[0, 0, 0], [1, 0, 0], [0.5, np.sqrt(3) / 2, 0], [0.5, np.sqrt(3) / 6, np.sqrt(6) / 3]]])
+ L = 1 # Set the desired level of refinement
+
+ # Refine the tetrahedron
+ refined_tetrahedra = refine_element(initial_tetrahedron, L)
+ plot_tetrahedra(refined_tetrahedra)
+
+
+"""
+Projection using aditional quad points on the source mesh.
+We need to generate baricenters of an L-order refinement of a simplex.
+Vertices of first reinement of triangle:
+vertices coords relative to V0, V1, V2 (bary coords)
+T0: (1, 0, 0), (1/2, 1/2, 0), (1/2, 0, 1/2)
+T1: (1/2, 1/2, 0), (0, 1, 0), (0, 1/2, 1/2)
+T2: (1/2, 1/2, 0), (0, 1/2, 1/2), (0, 0, 1),
+T3: (1/2, 1/2, 0), (1/2, 1/2, 0), (1/2, 0, 1/2)
+
+ON size 2 grid:
+T0: (1, 0, 0), (1/2, 1/2, 0), (1/2, 0, 1/2)
+T1: (1/2, 1/2, 0), (0, 1, 0), (0, 1/2, 1/2)
+T2: (1/2, 1/2, 0), (0, 1/2, 1/2), (0, 0, 1),
+T3: (1/2, 1/2, 0), (1/2, 1/2, 0), (1/2, 0, 1/2)
+
+... tensor (4, 3, 3) ... n_childes, n_source_veritices, n_result verites (same as source)
+source_vertices ... shape (n_vertices, coords_3d)
+.... T[:n_childs, :n_vertices, :, None] * source_vertices[None, :, None, :]
+
+Iterative aplication of the tensor + finaly barycenters.
+"""
+
+def project_ref_solution(flow_out, grid: fem.Grid):
+ # Velocity P0 projection
+ # 1. get array of barycenters (source points) and element volumes of the fine mesh
+ # 2. ijk cell coords of each source point
+ # 3. weights = el_volume / np.add.at(cell_vol_sum, ijk, el_volume)[ijk[:]]
+ # 4. np.add.at(cell_velocities, ijk, weights * velocities)
+ # :return:
+ pvd_content = pv.get_reader(flow_out.hydro.spatial_file.path)
+ pvd_content.set_active_time_point(0)
+ dataset = pvd_content.read()[0] # Take first block of the Multiblock dataset
+ cell_centers_coords = dataset.cell_centers().points
+ velocities = dataset.cell_data['velocity_p0']
+ interpolator = LinearNDInterpolator(cell_centers_coords, velocities, 0.0)
+ grid_velocities = interpolator(grid.barycenters())
+ return grid_velocities
+
+
+def homo_decovalex(fr_media: FracturedMedia, grid:fem.Grid):
+ """
+ Homogenize fr_media to the conductivity tensor field on grid.
+ :return: conductivity_field, np.array, shape (n_elements, n_voight)
+ """
+ ellipses = [dmap.Ellipse(fr.normal, fr.center, fr.scale) for fr in fr_media.dfn]
+ d_grid = dmap.Grid.make_grid(grid.origin, grid.step, grid.dimensions)
+ fractures = dmap.map_dfn(d_grid, ellipses)
+ fr_transmissivity = fr_media.fr_conductivity * fr_media.fr_cross_section
+ k_iso_zyx = dmap.permIso(d_grid, fractures, fr_transmissivity, fr_media.conductivity)
+ k_iso_xyz = grid.cell_field_C_like(k_iso_zyx)
+ k_voigt = k_iso_xyz[:, None] * np.array([1, 1, 1, 0, 0, 0])[None, :]
+ return k_voigt
+
+#@pytest.mark.skip
+def test_two_scale():
+ # Fracture set
+ domain_size = 100
+ #fr_range = (30, domain_size)
+ fr_range = (50, domain_size)
+
+ # Random fractures
+ # dfn = fracture_random_set(123, fr_range, max_frac=10)
+
+ # Fixed fracture set
+ shape_id = stochastic.EllipseShape.id
+ fr = lambda c, n : stochastic.Fracture(shape_id, [100, 100], c, n)
+ fractures = [
+ #fr([30,-10, 10], [0, 1, 0]),
+ fr([0,0,0], [1, 1, 0]),
+ #fr([-30,10,-10], [0, 1, 0])
+ ]
+ dfn = stochastic.FractureSet.from_list(fractures)
+ # Cubic law transmissvity
+ fr_media = FracturedMedia.fracture_cond_params(dfn, 1e-4, 0.001)
+
+ # Fractures and properties from DFNWorks
+ #fr_media = FracturedMedia.from_dfn_works("", bulk_conductivity)
+
+
+ # Coarse Problem
+ #steps = (50, 60, 70)
+ steps = (9, 10, 11)
+ #steps = (50, 60, 70)
+ #steps = (3, 4, 5)
+ fem_grid = fem.fem_grid(domain_size, steps, fem.Fe.Q(dim=3), origin=-domain_size / 2)
+ bc_pressure_gradient = [1, 0, 0]
+ grid_cond = homo_decovalex(fr_media, fem_grid.grid)
+ #grid_cond = np.ones(grid.n_elements)[:, None] * np.array([1, 1, 1, 0, 0, 0])[None, :]
+ pressure = fem_grid.solve_sparse(grid_cond, np.array(bc_pressure_gradient)[None, :])
+ assert not np.any(np.isnan(pressure))
+
+ flow_out = reference_solution(fr_media, fem_grid.grid.dimensions, bc_pressure_gradient)
+ project_fn = project_adaptive_source_quad
+ #project_fn = project_ref_solution
+ #ref_velocity_grid = grid.cell_field_F_like(project_fn(flow_out, grid).reshape((-1, 3)))
+ ref_velocity_grid = project_fn(flow_out, fem_grid.grid).reshape((-1, 3))
+
+ grad_pressure = fem_grid.field_grad(pressure) # (n_vectors, n_els, dim)
+ grad_pressure = grad_pressure[0, :, :][:, :, None] # (n_els, dim, 1)
+ velocity = -voigt_to_tn(grid_cond) @ grad_pressure # (n_els, dim, 1)
+ #velocity = grad_pressure # (n_els, dim, 1)
+ velocity = velocity[:, :, 0] # transpose
+ velocity_zyx = fem_grid.grid.cell_field_F_like(velocity) #.reshape(*grid.n_steps, -1).transpose(2,1,0,3).reshape((-1, 3))
+ # Comparison
+ # origin = [0, 0, 0]
+
+ #pv_grid = pv.StructuredGrid()
+ x, y, z = np.meshgrid(*fem_grid.grid.axes_linspace(), indexing='ij')
+ pv_grid = pv.StructuredGrid(x, y, z)
+ #points = grid.nodes()
+ pv_grid_centers = pv_grid.cell_centers().points
+ print(fem_grid.grid.barycenters())
+ print(pv_grid_centers)
+
+ cell_fields = dict(
+ ref_velocity = ref_velocity_grid,
+ homo_velocity = velocity_zyx,
+ diff = velocity_zyx - ref_velocity_grid,
+ homo_cond = fem_grid.grid.cell_field_F_like(grid_cond),
+ )
+ point_fields = dict(
+ homo_pressure=pressure[0]
+ )
+ pv_grid = fem_plot.grid_fields_vtk(fem_grid.grid, cell_fields, vtk_path=workdir/'test_result.vtk')
+ plotter = fem_plot.create_plotter() #off_screen=True, window_size=(1024, 768))
+ plotter.add_mesh(pv_grid, scalars='ref_velocity')
+ plotter.show()
+ #pv_grid.dimensions = grid.n_steps + 1
+ #pv_grid.cell_data['ref_velocity'] = np.arange(22*20*18).reshape((18,20,22)).transpose((2, 1, 0)).reshape((-1,))
+
+ #pv_grid.save(str(workdir / "test_result.vtk"))
+
diff --git a/tests/upscale/test_voxelize.py b/tests/upscale/test_voxelize.py
new file mode 100644
index 0000000..5aee516
--- /dev/null
+++ b/tests/upscale/test_voxelize.py
@@ -0,0 +1,419 @@
+"""
+Test of homogenization algorithms from voxelize.py
+- Homogenization of bulk constant conductivity + discreate fractures with size dependent conductivity.
+ Reference is decovalex slow solution modified for anisotropic regular grid.
+ This assigns The same conductivity to all intersection cells.
+
+ In order to develop more precises homogenization techniques, we must use two-scale test problems.
+"""
+import pytest
+import fixtures
+
+from typing import *
+import yaml
+import shutil
+from pathlib import Path
+from scipy import integrate
+
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger()
+
+
+import numpy as np
+import attrs
+import pyvista as pv
+
+from bgem import stochastic
+from bgem import fn
+from bgem.gmsh import gmsh, options
+from mesh_class import Mesh
+from bgem.core import call_flow, dotdict, workdir as workdir_mng
+from bgem.upscale import *
+from bgem.upscale import fem_plot
+from bgem.upscale.voxelize import base_shape_interior_grid
+
+script_dir = Path(__file__).absolute().parent
+workdir = script_dir / "sandbox"
+from joblib import Memory
+memory = Memory(workdir, verbose=0)
+
+
+def bulk_sphere_field(grid: Grid, out_sphere_value, in_sphere_value):
+ r = np.min(grid.dimensions) * 0.8 / 2
+ center = grid.grid_center()
+ field = np.full(grid.shape, out_sphere_value).flatten()
+ in_sphere = np.linalg.norm(grid.barycenters() - center, axis=1) < r
+ field[in_sphere] = in_sphere_value
+ return field
+
+def bulk_perm_field(grid: Grid):
+ """
+ This bulk field has high permeability out of a sphere that should transfer
+ the outer BC to the inner sphere of lower conductivity. There fore the
+ equivalent tensor should be rotation invariant, i.e. if we rotate fractures in the sphere
+ the equivalent tensor T of the whole domain should match the Q.T @T' @Q, if T' is
+ equivalent tensor of fractures rotated by matrix Q.T.
+ This allows to test correct rasterization with respect to different rotations.
+ :param grid:
+ :return:
+ """
+ return bulk_sphere_field(grid, 1, 1e-10)[:, None, None] * np.eye(3)
+
+
+def drop_tuple_item(x, i) :
+ return (*x[:i], *x[i+1:])
+
+def insert_tuple_item(x, i, item):
+ return (*x[:i], item, *x[i:])
+
+
+def product_concatenate(a, b, axis=0):
+ assert len(a.shape) == len(b.shape)
+ a_shape = drop_tuple_item(a.shape, axis)
+ b_shape = drop_tuple_item(b.shape, axis)
+ common_shape = np.broadcast_shapes(a_shape, b_shape)
+ a_new_shape = insert_tuple_item(common_shape, axis, a.shape[axis])
+ b_new_shape = insert_tuple_item(common_shape, axis, b.shape[axis])
+ result = np.concatenate((
+ np.broadcast_to(a, a_new_shape),
+ np.broadcast_to(b, b_new_shape)
+ ), axis=axis)
+ return result
+
+
+def probe_fr_intersection(fr_set: stochastic.FractureSet, grid: Grid):
+ domain = FracturedDomain(fr_set, np.ones(len(fr_set)), grid)
+ i_cell = []
+ i_fracture = []
+ min_grid_step = min(grid.step)
+
+ for i in range(len(fr_set)):
+ radius = np.max(fr_set.radius[i])
+ step = 0.5 * min_grid_step / radius
+ ref_points_xy = base_shape_interior_grid(fr_set.base_shape, step)
+ #z_coord = np.array([-min_grid_step/2.1, 0, min_grid_step/2.1])
+ n_z = np.abs(fr_set.normal[i, 2])
+ n_xy = np.linalg.norm(fr_set.normal[i, :2])
+ n_max = max(n_z, n_xy)
+ n_min = min(n_z, n_xy)
+
+ z1 = min_grid_step * n_min / n_max
+ z_coord = np.array([-z1/8, 0, z1/8])
+ ref_points_xyz = product_concatenate(ref_points_xy[:, None, :], z_coord[None, :, None], axis=2).reshape(-1, 3)
+
+ actual_points = (fr_set.transform_mat[i] @ ref_points_xyz[:, :, None])[:, :, 0] + fr_set.center[i]
+ cell_indices = np.unique(grid.project_points(actual_points))
+ i_cell.extend(cell_indices.tolist())
+ i_fracture.extend(len(cell_indices)*[i])
+ return Intersection.const_isec(domain, i_cell, i_fr, 1.0)
+
+def plot_isec_fields(intersections: List[Intersection], names: List[str], outpath: Path):
+ """
+ Assume common grid
+ :param intersections:
+ :return:
+ """
+ grid = intersections[0].grid
+ cell_fields = {n: isec.cell_field() for n, isec in zip(names, intersections)}
+
+ pv_grid = fem_plot.grid_fields_vtk(grid, cell_fields, vtk_path=outpath)
+
+ #plotter = fem_plot.create_plotter() # off_screen=True, window_size=(1024, 768))
+ #plotter.add_mesh(pv_grid, scalars='cell_field')
+ #plotter.show()
+
+def plot_isec_fields2(isec: Intersection, in_field, out_field, outpath: Path):
+ """
+ Assume common grid
+ :param intersections:
+ :return:
+ """
+ grid = isec.grid
+ cell_fields = {
+ 'cell_field': isec.cell_field(),
+ 'in_field' : in_field,
+ 'out_field' : out_field}
+
+ pv_grid = fem_plot.grid_fields_vtk(grid, cell_fields, vtk_path=outpath)
+
+ #plotter = fem_plot.create_plotter() # off_screen=True, window_size=(1024, 768))
+ #plotter.add_mesh(pv_grid, scalars='cell_field')
+ #plotter.show()
+
+
+
+def compare_intersections(isec, isec_ref, fname):
+ count_ref = isec_ref.count_fr_cells()
+ count_isec = isec.count_fr_cells()
+ count_diff = count_isec - count_ref
+ rel_error = np.abs(count_diff) / np.maximum(count_isec, count_ref)
+ if np.max(rel_error) > 0.1:
+ print("Large error fractures:\n")
+ for i in range(len(count_ref)):
+ print(f"fr #{i}, ref: {count_ref[i]}, isec: {count_isec[i]}, err: {rel_error[i]}")
+ plot_isec_fields([isec, isec_ref], ['isec', 'isec_ref'], workdir / (fname + '.vtk'))
+ assert False
+
+def isec_decovalex_case(fr_list: List[stochastic.Fracture], grid: Grid):
+ """
+ Test detected cell-fracture intersections produced by the DFN Works algorithm.
+ We just generate a fine grid in XY reference plane of the fracture and map
+ them to the grid coordinates to find how much
+ :param fr_set:
+ :return:
+ """
+ fr_set = stochastic.FractureSet.from_list(fr_list)
+ isec = intersection_decovalex(fr_set, grid)
+ isec_corners = intersection_cell_corners(fr_set, grid)
+ isec_probe = probe_fr_intersection(fr_set, grid)
+ compare_intersections(isec, isec_probe, "compare_decovalex")
+ compare_intersections(isec, isec_corners, "compare_corners")
+
+@pytest.mark.skip
+def test_intersection_decovalex():
+ """
+ Test correct set of intersection cells for each fracture.
+ :return:
+ """
+ steps = 3* [41]
+ grid = Grid(3*[100], steps, origin=3*[-50]) # test grid with center in (0,0,0)
+ shape = stochastic.EllipseShape
+
+ fr = lambda r, c, n : stochastic.Fracture(shape.id, r, c, n/np.linalg.norm(n))
+ fr_list = [fr(45, [0, 0.7, 0], [0, 0, 1]),]
+ isec_decovalex_case(fr_list, grid)
+
+ fr_list = [fr(50, [0.7, 0, 0], [0, 1, 0]),]
+ isec_decovalex_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, 0.7], [1, 0, 0]),]
+ isec_decovalex_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, 0], [0, 1, 1]),]
+ isec_decovalex_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, -0.7], [0, 1, 3]),]
+ isec_decovalex_case(fr_list, grid)
+
+ fr_list = [fr(60, [0, 5, -5], [0, 1, 3]),
+ fr(40, [10, 0, -10], [1, 0, 0]),
+ fr(40, [10, 0, 0], [3, 1, 0]),
+ fr(60, [-5, 0, 0], [-2, 1, 3]),
+ ]
+ isec_decovalex_case(fr_list, grid)
+
+def isec_corners_case(fr_list: List[stochastic.Fracture], grid: Grid):
+ """
+ Test detected cell-fracture intersections produced by the DFN Works algorithm.
+ We just generate a fine grid in XY reference plane of the fracture and map
+ them to the grid coordinates to find how much
+ :param fr_set:
+ :return:
+ """
+ fr_set = stochastic.FractureSet.from_list(fr_list)
+ #isec = intersection_decovalex(fr_set, grid)
+ isec_corners = intersection_cell_corners(fr_set, grid)
+ isec_probe = probe_fr_intersection(fr_set, grid)
+ compare_intersections(isec_corners, isec_probe, "compare_corners_rect")
+
+@pytest.mark.skip
+def test_intersection_corners_rectangle():
+ """
+ Test correct set of intersection cells for each fracture.
+ :return:
+ """
+ steps = 3* [41]
+ grid = Grid(3*[100], steps, origin=3*[-50]) # test grid with center in (0,0,0)
+ shape = stochastic.RectangleShape
+
+ fr = lambda r, c, n, ax=[1,0] : stochastic.Fracture(shape.id, r, c, n/np.linalg.norm(n), ax/np.linalg.norm(ax))
+ fr_list = [fr(45, [0, 0.7, 0], [0, 0, 1]),]
+ isec_corners_case(fr_list, grid)
+
+ fr_list = [fr(50, [0.7, 0, 0], [0, 1, 0]),]
+ isec_corners_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, 0.7], [1, 0, 0]),]
+ isec_corners_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, 0], [0, 1, 1]),]
+ isec_corners_case(fr_list, grid)
+
+ fr_list = [fr(50, [0, 0, -0.7], [0, 1, 3], ax=[1,1])]
+ isec_corners_case(fr_list, grid)
+
+ fr_list = [fr(30, [0, 10, -10], [0, 1, 3], ax=[1,1] ),
+ fr(60, [10, 0, -10], [1, 0, 0], ax=[-2,1]),
+ fr(30, [10, 0, 0], [3, 1, 0], ax=[-1,-2]),
+ fr(30, [-10, 0, 0], [-2, 1, 3], ax=[2,-1]),
+ ]
+ isec_corners_case(fr_list, grid)
+
+def test_rasterized_field():
+ """
+ Test whole rasterization process using intersection_cell_corners
+ :return:
+ """
+ source_grid = Grid(3*[100], 3*[41], origin=3*[-50])
+ bulk_source_conductivity = bulk_sphere_field(source_grid, 1.0, 1e-11)
+ bulk_tn = bulk_source_conductivity[:, None, None] * np.eye(3)[None, :, :]
+
+ steps = 3* [41]
+ target_grid = Grid(3*[100], steps, origin=3*[-50]) # test grid with center in (0,0,0)
+
+ shape = stochastic.RectangleShape
+ fr = lambda r, c, n, ax=[1,0] : stochastic.Fracture(shape.id, r, c, n/np.linalg.norm(n), ax/np.linalg.norm(ax))
+ fr_list = [fr(30, [0, 5, -5], [0, 1, 3], ax=[1,1] ),
+ fr(30, [5, 0, -5], [1, 0, 0], ax=[-2,1]),
+ fr(30, [5, 0, 0], [3, 1, 0], ax=[-1,-2]),
+ fr(30, [-5, 0, 0], [-2, 1, 3], ax=[2,-1]),
+ ]
+ fr_set = stochastic.FractureSet.from_list(fr_list)
+ isec_corners = intersection_cell_corners(fr_set, target_grid)
+ #isec_probe = probe_fr_intersection(fr_set, target_grid)
+ cross_section, fr_cond = fr_conductivity(fr_set)
+ rasterized = isec_corners.interpolate(bulk_tn, fr_cond, source_grid=source_grid)
+ plot_isec_fields2(isec_corners, bulk_tn, rasterized, workdir / "raster_field.vtk")
+ for i_ax in range(3):
+ assert np.all(bulk_tn[:, i_ax, i_ax] <= rasterized[:, i_ax, i_ax])
+ for i_ax in range(3):
+ assert np.all(rasterized[:, i_ax, i_ax].max() <= fr_conductivity[:, i_ax, i_ax].max())
+
+
+
+
+# def dfn_4_fractures():
+# return voxelize.FracturedMedia.from_dfn_works(script_dir / "4_fractures", 0.01)
+
+# def test_load_dfnworks():
+# dfn = dfn_4_fractures()
+# assert dfn.dfn.size == 4
+
+
+def tst_fracture_set(R, shape):
+ fr = lambda c, n : stochastic.Fracture(shape.id, R, c, n, 0.0, 123, 1)
+ return [
+ #fr([0, 0, 0.7], [0, 0, 1]),
+ #fr([0, 0.7, 0], [0, 1, 0]),
+ #fr([0.7, 0, 0], [1, 0, 0]),
+ #fr([0, 0, 0], [0.5, 0, 1]),
+ fr([0, 0, 0.7], [0, 0.5, 1]),
+ #fr([0, 0, 0], [0.1, 1, 1]),
+ #fr([0, 0, 0], [0.3, 1, 1]),
+ #fr([0, 0, -0.7], [0.5, 1, 1]),
+ fr([0, 0, -0.5], [1, 1, 1])
+ ]
+
+
+
+def homo_decovalex(fr_media: FracturedMedia, grid:Grid, perm_fn):
+ """
+ Homogenize fr_media to the conductivity tensor field on grid.
+ :return: conductivity_field, np.array, shape (n_elements, n_voight)
+ """
+ ellipses = [dmap.Ellipse(fr.normal, fr.center, fr.scale) for fr in fr_media.dfn]
+ d_grid = dmap.Grid.make_grid(grid.origin, grid.step, grid.dimensions)
+ fractures = dmap.map_dfn(d_grid, ellipses)
+ fr_transmissivity = fr_media.fr_conductivity * fr_media.fr_cross_section
+ return perm_fn(d_grid, fractures, fr_transmissivity, fr_media.conductivity)
+
+def homo_decovalex_iso(fr_media: FracturedMedia, grid:Grid):
+ perm_fn = lambda *args : dmap.permIso(*args)[:, None, None] * np.eye(3)
+ return homo_decovalex(fr_media, grid, perm_fn)
+
+def homo_decovalex_aniso_raw(fr_media: FracturedMedia, grid: Grid):
+ perm_fn = lambda *args : dmap.permAnisoRaw(*args)
+ return homo_decovalex(fr_media, grid, perm_fn)
+
+def homo_decovalex_aniso_diag(fr_media: FracturedMedia, grid: Grid):
+ perm_fn = lambda *args : dmap.aniso_diag(dmap.permAnisoRaw(*args))
+ return homo_decovalex(fr_media, grid, perm_fn)
+
+def homo_decovalex_aniso_lump(fr_media: FracturedMedia, grid: Grid):
+ perm_fn = lambda *args : dmap.aniso_lump(dmap.permAnisoRaw(*args))
+ return homo_decovalex(fr_media, grid, perm_fn)
+
+def rasterize_dfn(fr_set):
+ # Fracture set
+ domain_size = 100
+
+ # Coarse Problem
+ steps = (10, 12, 14)
+ grid = Grid(3*[domain_size], steps, origin=-domain_size / 2)
+
+ dfn = tst_fracture_set(grid.dimensions)
+ fr_media = FracturedMedia.fracture_cond_params(dfn, 0.1, 1)
+
+ xyz_range = [ np.linspace(grid.origin[ax], grid.origin[ax] + grid.dimensions[ax], grid.shape[ax] + 1, dtype=np.float32)
+ for ax in [0, 1, 2]
+ ]
+
+ x, y, z = np.meshgrid(*xyz_range, indexing='ij')
+ pv_grid = pv.StructuredGrid(x, y, z)
+ #points = grid.nodes()
+ for name, homo_fn in homo_fns.items():
+
+ grid_permitivity = homo_fn(fr_media, grid)
+ if len(grid_permitivity.shape) > 1:
+ # anisotropic case
+ assert grid_permitivity.shape[1:] == (3, 3)
+ grid_permitivity = grid_permitivity.reshape(-1, 9)
+ pv_grid.cell_data[name] = grid_permitivity
+ pv_grid.save(str(workdir / "test_resterize.vtk"))
+
+
+
+def test_reasterize():
+ homo_fns=dict(
+ k_deco_iso=voxelize.permeability_iso_decovalex,
+ k_deco_aniso_raw=voxelize.permeability_aniso_decovalex,
+ k_deco_aniso_diag=fn.compose(voxelize.aniso_diag, voxelize.permeability_aniso_decovalex),
+ k_deco_aniso_lump=fn.compose(voxelize.aniso_lump, voxelize.permeability_aniso_decovalex)
+ )
+ rasterize_dfn(homo_fns)
+
+
+# def fracture_band_field(grid, fr_set:stochastic.FractureSet):
+# """
+# 1. define distance field on a grid in the fracture coordinates, grid has step:
+# 1/k * [norm(grid.step)/rx, norm(grid.step)/ry, band_width]
+# band_width = grid.step @ np.abs(normal)
+# Use base_shape slightly enlarged aabb as a source domain extent.
+# Distance field is linear only in Z axis, but has jump on the shape border.
+# 2. evaluate in cell centers transformed into fracture system use:
+# scipy.interpolate.interpn
+# values on fracture = cross_section /
+# :param grid:
+# :param fr_set:
+# :return:
+# """
+# N = 1e4
+# aabb = fr_set.base_shape.aabb
+# shape_grid = Grid.from_aabb(aabb, n_steps)oints = np.random.random((N, 2)) * (aabb[1] - aabb[0]) + aabb[0]
+# inside_vector = fr_set.base_shape.are_points_inside(points)
+# points = points[inside_vector]
+#
+# i_cells = grid.project_points(fr_set.transform_mat[0] @ points.T + fr_set.center[0])
+# field = np.bincount(i_cells, minlength=grid.n_elements) / len(points)
+# return field
+#
+# def compare_voxelization(grid, fractures:stochastic.FractureSet):
+# """
+# For given grid compare all available voxelization functions
+# with respect to the sampling the fracture band by points
+# :return:
+# """
+# cross_section = 1e-4 * fractures.radius_norm
+# domain = FracturedDomain(fractures, cross_section, grid)
+# isec_band = Intersection.band_fractures(domain)
+# for fr in fractures:
+#
+#
+# def test_voxelize_single_fracture():
+# size = 100
+# domain = Grid(size, 16)
+# fractures = fixtures.get_dfn_sample(size, 123)
+# for fr in fractures:
+# fr_set = stochastic.FractureSet.from_list([fr])
+# compare_voxelization(grid, fr_set)
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index 0c191df..be635e5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,30 +2,34 @@
# content of: tox.ini , put in same dir as setup.py
[tox]
-envlist = py36, py37, py38
-#envlist = py38
+#envlist = py36, py37, py38
+envlist = py312
[gh-actions]
python =
3.6: py36
3.7: py37
3.8: py38
-
+ 3.10: py310
+ 3.12: py312
[testenv]
# dependencies for tests (include dependencies of the package itself)
# these are installed before tox run installation of bgem
-deps =
- pytest<5.1.0
- plotly
- matplotlib
+# TODO: how to include requirements.txt; that shold be meant for testing
+deps = -rrequirements.txt
+ #pytest
+ #plotly
+ #matplotlib
pandas
#setenv =
# MPLBACKEND=Qt5Agg
#passenv = MPLBACKEND
+usedevelop=true
commands =
pytest {posargs}
+
[testenv:coverage-report]
deps = coverage
skip_install = true