<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xml:lang="ru"><front><journal-meta><journal-id journal-id-type="publisher-id">zldm</journal-id><journal-title-group><journal-title xml:lang="ru">Заводская лаборатория. Диагностика материалов</journal-title><trans-title-group xml:lang="en"><trans-title>Industrial laboratory. Diagnostics of materials</trans-title></trans-title-group></journal-title-group><issn pub-type="ppub">1028-6861</issn><issn pub-type="epub">2588-0187</issn><publisher><publisher-name>ООО «Издательство «ТЕСТ-ЗЛ»</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="doi">10.26896/1028-6861-2026-92-3-87-94</article-id><article-id custom-type="elpub" pub-id-type="custom">zldm-2764</article-id><article-categories><subj-group subj-group-type="heading"><subject>Research Article</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="ru"><subject>МАТЕМАТИЧЕСКИЕ МЕТОДЫ ИССЛЕДОВАНИЯ</subject></subj-group><subj-group subj-group-type="section-heading" xml:lang="en"><subject>MATHEMATICAL METHODS OF INVESTIGATION</subject></subj-group></article-categories><title-group><article-title>Основы системно-синергетической теории информации и универсальный информационный вариационный принцип развития систем</article-title><trans-title-group xml:lang="en"><trans-title>Fundamentals of the system-synergetic theory of information and the universal informational variational principle of system development</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Луценко</surname><given-names>Е. В.</given-names></name><name name-style="western" xml:lang="en"><surname>Lutsenko</surname><given-names>E. 
V.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Евгений Вениаминович Луценко</p><p>350044, г. Краснодар, ул. Калинина, д. 13</p></bio><bio xml:lang="en"><p>Evgeny V. Lutsenko</p><p>13, ul. Kalinina, Krasnodar, 350044</p></bio><email xlink:type="simple">prof.lutsenko@gmail.com</email><xref ref-type="aff" rid="aff-1"/></contrib><contrib contrib-type="author" corresp="yes"><name-alternatives><name name-style="eastern" xml:lang="ru"><surname>Головин</surname><given-names>Н. С.</given-names></name><name name-style="western" xml:lang="en"><surname>Golovin</surname><given-names>N. S.</given-names></name></name-alternatives><bio xml:lang="ru"><p>Никита Сергеевич Головин</p><p>г. Нови-Сад, ул. Доситеева, д. 8</p></bio><bio xml:lang="en"><p>Nikita S. Golovin</p><p>8, ul. Dositejeva, Novi Sad</p></bio><email xlink:type="simple">nikitagolovin416@gmail.com</email><xref ref-type="aff" rid="aff-2"/></contrib></contrib-group><aff-alternatives id="aff-1"><aff xml:lang="ru">ФГБОУ ВО Кубанский ГАУ<country>Россия</country></aff><aff xml:lang="en">Kuban State Agrarian University<country>Russian Federation</country></aff></aff-alternatives><aff-alternatives id="aff-2"><aff xml:lang="ru">Элитная частная экономическая школа<country>Сербия</country></aff><aff xml:lang="en">Elite Private Economic School<country>Serbia</country></aff></aff-alternatives><pub-date pub-type="collection"><year>2026</year></pub-date><pub-date pub-type="epub"><day>26</day><month>03</month><year>2026</year></pub-date><volume>92</volume><issue>3</issue><fpage>87</fpage><lpage>94</lpage><permissions><copyright-statement>Copyright &#x00A9; Луценко Е.В., Головин Н.С., 2026</copyright-statement><copyright-year>2026</copyright-year><copyright-holder xml:lang="ru">Луценко Е.В., Головин Н.С.</copyright-holder><copyright-holder xml:lang="en">Lutsenko E.V., Golovin N.S.</copyright-holder><license license-type="creative-commons-attribution" 
xlink:href="https://creativecommons.org/licenses/by/4.0/" xlink:type="simple"><license-p>This work is licensed under a Creative Commons Attribution 4.0 License.</license-p></license></permissions><self-uri xlink:href="https://www.zldm.ru/jour/article/view/2764">https://www.zldm.ru/jour/article/view/2764</self-uri><abstract><p>Изложены основы объединенной системно-синергетической теории информации, значительно превосходящей по концепциям и практическому применению классические подходы. Эта теория — результат формального синтеза двух передовых российских научных школ: синергетической теории, которая описывает информацию через призму динамических процессов самоорганизации, и системной теории, определяющей информацию через структурно-иерархические и эмерджентные свойства систем. В отличие от классической теории, которая рассматривает информацию как меру разнообразия в множестве несвязанных элементов, предложенный подход вводит понятие «системы», где ключевую роль играют взаимосвязи между элементами. Это позволяет количественно измерять такие ранее сугубо качественные понятия, как сложность и эмерджентность. Основным результатом теории является формулировка универсального информационного вариационного принципа, который постулирует, что развитие любых открытых систем — от физических и биологических до экономических и социальных — происходит таким образом, чтобы максимизировать скорость приращения информации. Этот принцип предлагается рассматривать как одну из ключевых закономерностей эволюции, определяющих направление развития систем в сторону усложнения и упорядочивания. Теория предлагает конкретную метрику для измерения системности и сложности, такую как коэффициент эмерджентности, который показывает, во сколько раз информационная емкость системы превышает информационную емкость простого набора ее элементов. 
В данной работе предложен подход к преодолению ряда ограничений классической теории информации и сформулирована плодотворная программа для будущих исследований, понимания и моделирования сложных систем, описан единый механизм для широкого спектра явлений. Практические применения теории включают разработку новых методов анализа больших данных, создание более адаптивных и самообучающихся систем искусственного интеллекта, а также прогнозирование развития рынков и социальных сетей. Таким образом, предложенная теория закладывает основы для создания единой науки о сложности, объединяя мнения ученых из различных областей.</p></abstract><trans-abstract xml:lang="en"><p>The article outlines the conceptual foundations of a unified system-synergetic theory of information, which represents a significant step forward compared to classical approaches. This theory is the result of a formal synthesis of two leading Russian scientific schools: the synergetic theory of information, which describes information through the prism of dynamic self-organization processes, and the systems theory of information, which defines information through the structural-hierarchical and emergent properties of systems. Unlike classical theory, which views information as a measure of diversity within a set of unrelated elements, the proposed approach introduces the concept of a «system», where interconnections between elements play a pivotal role. This allows for the quantitative measurement of such previously purely qualitative concepts as complexity and emergence. The central result of the theory is the formulation of the universal information variational principle. This principle postulates that the development of any open system — from physical and biological to economic and social — occurs in such a way as to maximize the rate of information increment. 
It is proposed that this principle be viewed as one of the key regularities of evolution, determining the direction of system development toward increasing complexity and order. The theory offers specific metrics for measuring systemicity and complexity, such as the coefficient of emergence, which indicates how many times the information capacity of a system exceeds the information capacity of a simple set of its elements. In conclusion, this work offers an approach to overcoming a number of limitations inherent in classical information theory and formulates a fruitful program for future research. It opens new horizons for understanding and modeling complex systems by offering a unified explanatory mechanism for a wide range of phenomena. Practical applications of the theory include the development of new methods for big data analysis, the creation of more adaptive and self-learning artificial intelligence systems, and the forecasting of market and social network development. Thus, the proposed theory lays the foundations for the creation of a unified science of complexity, uniting the efforts of scientists from various fields.</p></trans-abstract><kwd-group xml:lang="ru"><kwd>системная теория информации</kwd><kwd>синергетическая теория информации</kwd><kwd>универсальный информационный вариационный принцип</kwd><kwd>самоорганизация</kwd><kwd>эмерджентность</kwd><kwd>развитие систем</kwd></kwd-group><kwd-group xml:lang="en"><kwd>system theory of information</kwd><kwd>synergetic theory of information</kwd><kwd>universal informational variational principle</kwd><kwd>self-organization</kwd><kwd>emergence</kwd><kwd>system development</kwd></kwd-group></article-meta></front><back><ref-list><title>References</title><ref id="cit1"><label>1</label><citation-alternatives><mixed-citation xml:lang="ru">Lutsenko E. V. On the possibility of combining systemic and synergetic information theories / Polythem. Online Electron. Sci. J. Kuban State Agrar. Univ. 2025. No. 212. P. 
227 – 237. DOI: 10.21515/1990-4665-212-019</mixed-citation><mixed-citation xml:lang="en">Lutsenko E. V. On the possibility of combining systemic and synergetic information theories / Polythem. Online Electron. Sci. J. Kuban State Agrar. Univ. 2025. No. 212. P. 227 – 237. DOI: 10.21515/1990-4665-212-019</mixed-citation></citation-alternatives></ref><ref id="cit2"><label>2</label><citation-alternatives><mixed-citation xml:lang="ru">Vyatkin V. B. A synergetic theory of information / Information. 2019. Vol. 10. P. 142. DOI: 10.3390/info10040142</mixed-citation><mixed-citation xml:lang="en">Vyatkin V. B. A synergetic theory of information / Information. 2019. Vol. 10. P. 142. DOI: 10.3390/info10040142</mixed-citation></citation-alternatives></ref><ref id="cit3"><label>3</label><citation-alternatives><mixed-citation xml:lang="ru">Xin G., Fan P., Letaief K. B. Semantic communication: entropy and bottleneck perspectives / Entropy. 2024. Vol. 26. No. 2. P. 102. DOI: 10.3390/e26020102</mixed-citation><mixed-citation xml:lang="en">Xin G., Fan P., Letaief K. B. Semantic communication: entropy and bottleneck perspectives / Entropy. 2024. Vol. 26. No. 2. P. 102. DOI: 10.3390/e26020102</mixed-citation></citation-alternatives></ref><ref id="cit4"><label>4</label><citation-alternatives><mixed-citation xml:lang="ru">Lu C. A semantic generalization of Shannon’s information theory and applications / Entropy. 2025. Vol. 27. No. 5. P. 461. DOI: 10.3390/e27050461</mixed-citation><mixed-citation xml:lang="en">Lu C. A semantic generalization of Shannon’s information theory and applications / Entropy. 2025. Vol. 27. No. 5. P. 461. DOI: 10.3390/e27050461</mixed-citation></citation-alternatives></ref><ref id="cit5"><label>5</label><citation-alternatives><mixed-citation xml:lang="ru">Fitousi D. Information-theoretic measures of metacognitive efficiency: empirical validation with the face matching task / Entropy. 2025. Vol. 27. No. 4. P. 353. 
DOI: 10.3390/e27040353</mixed-citation><mixed-citation xml:lang="en">Fitousi D. Information-theoretic measures of metacognitive efficiency: empirical validation with the face matching task / Entropy. 2025. Vol. 27. No. 4. P. 353. DOI: 10.3390/e27040353</mixed-citation></citation-alternatives></ref><ref id="cit6"><label>6</label><citation-alternatives><mixed-citation xml:lang="ru">Zhang P., Liu Y., Song Y., Zhang J. Advances and challenges in semantic communications / Natl. Sci. Open. 2024. Vol. 3. 20230029. DOI: 10.1360/nso/20230029</mixed-citation><mixed-citation xml:lang="en">Zhang P., Liu Y., Song Y., Zhang J. Advances and challenges in semantic communications / Natl. Sci. Open. 2024. Vol. 3. 20230029. DOI: 10.1360/nso/20230029</mixed-citation></citation-alternatives></ref><ref id="cit7"><label>7</label><citation-alternatives><mixed-citation xml:lang="ru">Murphy C., Thibeault V., Allard A., Desrosiers P. Information-theoretic reconstruction framework for complex systems / Nat. Comm. 2024. Vol. 15. 4478. DOI: 10.1038/s41467-024-48020-x</mixed-citation><mixed-citation xml:lang="en">Murphy C., Thibeault V., Allard A., Desrosiers P. Information-theoretic reconstruction framework for complex systems / Nat. Comm. 2024. Vol. 15. 4478. DOI: 10.1038/s41467-024-48020-x</mixed-citation></citation-alternatives></ref><ref id="cit8"><label>8</label><citation-alternatives><mixed-citation xml:lang="ru">Lindgren K. Information theory for complex systems: an information perspective on complexity in dynamical systems and statistical mechanics. — Springer, 2024. DOI: 10.1007/978-3-662-68214-2</mixed-citation><mixed-citation xml:lang="en">Lindgren K. Information theory for complex systems: an information perspective on complexity in dynamical systems and statistical mechanics. — Springer, 2024. 
DOI: 10.1007/978-3-662-68214-2</mixed-citation></citation-alternatives></ref><ref id="cit9"><label>9</label><citation-alternatives><mixed-citation xml:lang="ru">Wang Z., Janowicz K., Mai G., Majic I. Probing the information theoretical roots of spatial dependence measures / COSIT 2024, LIPIcs. 2024. Vol. 9. DOI: 10.4230/lipics.cosit.2024.9</mixed-citation><mixed-citation xml:lang="en">Wang Z., Janowicz K., Mai G., Majic I. Probing the information theoretical roots of spatial dependence measures / COSIT 2024, LIPIcs. 2024. Vol. 9. DOI: 10.4230/lipics.cosit.2024.9</mixed-citation></citation-alternatives></ref><ref id="cit10"><label>10</label><citation-alternatives><mixed-citation xml:lang="ru">Wang H., Song C., Gao P. Complexity and entropy of natural patterns / PNAS Nexus. 2024. Vol. 3. No. 10. P. 417. DOI: 10.1093/pnasnexus/pgae417</mixed-citation><mixed-citation xml:lang="en">Wang H., Song C., Gao P. Complexity and entropy of natural patterns / PNAS Nexus. 2024. Vol. 3. No. 10. P. 417. DOI: 10.1093/pnasnexus/pgae417</mixed-citation></citation-alternatives></ref><ref id="cit11"><label>11</label><citation-alternatives><mixed-citation xml:lang="ru">Paul S., Chowdhury A. R., Gangopadhyay S. Information theoretic measures for lifshitz system / J. High Energy Phys. 2024. No. 10. 33. DOI: 10.1007/jhep10(2024)033</mixed-citation><mixed-citation xml:lang="en">Paul S., Chowdhury A. R., Gangopadhyay S. Information theoretic measures for lifshitz system / J. High Energy Phys. 2024. No. 10. 33. DOI: 10.1007/jhep10(2024)033</mixed-citation></citation-alternatives></ref><ref id="cit12"><label>12</label><citation-alternatives><mixed-citation xml:lang="ru">Suriano M., Caram L. F., Caiafa C., et al. Information theory quantifiers in cryptocurrency time series analysis / Entropy. 2025. Vol. 27. No. 4. P. 450. DOI: 10.3390/e27040450</mixed-citation><mixed-citation xml:lang="en">Suriano M., Caram L. F., Caiafa C., et al. 
Information theory quantifiers in cryptocurrency time series analysis / Entropy. 2025. Vol. 27. No. 4. P. 450. DOI: 10.3390/e27040450</mixed-citation></citation-alternatives></ref><ref id="cit13"><label>13</label><citation-alternatives><mixed-citation xml:lang="ru">Dayan P. Metacognitive information theory / Open Mind. 2023. DOI: 10.1162/opmi_a_00091</mixed-citation><mixed-citation xml:lang="en">Dayan P. Metacognitive information theory / Open Mind. 2023. DOI: 10.1162/opmi_a_00091</mixed-citation></citation-alternatives></ref><ref id="cit14"><label>14</label><citation-alternatives><mixed-citation xml:lang="ru">Ortiz-Muñoz A. Homotopical entropy: a mathematical generalization / arXiv. 2025. 2501. 10672. DOI: 10.48550/arxiv.2501.10672</mixed-citation><mixed-citation xml:lang="en">Ortiz-Muñoz A. Homotopical entropy: a mathematical generalization / arXiv. 2025. 2501. 10672. DOI: 10.48550/arxiv.2501.10672</mixed-citation></citation-alternatives></ref><ref id="cit15"><label>15</label><citation-alternatives><mixed-citation xml:lang="ru">Ataei M., Wang X. Derangetropy in probability distributions and information dynamics / arXiv. 2024. 2409. 15301. DOI: 10.48550/arxiv.2409.15301</mixed-citation><mixed-citation xml:lang="en">Ataei M., Wang X. Derangetropy in probability distributions and information dynamics / arXiv. 2024. 2409. 15301. DOI: 10.48550/arxiv.2409.15301</mixed-citation></citation-alternatives></ref><ref id="cit16"><label>16</label><citation-alternatives><mixed-citation xml:lang="ru">Barbarossa S., Comminiello D., Grassucci E., et al. Semantic communications based on adaptive generative models and information bottleneck / IEEE Comm. Mag. 2023. Vol. 61. No. 11. P. 36 – 41. DOI: 10.1109/mcom.005.2200829</mixed-citation><mixed-citation xml:lang="en">Barbarossa S., Comminiello D., Grassucci E., et al. Semantic communications based on adaptive generative models and information bottleneck / IEEE Comm. Mag. 2023. Vol. 61. No. 11. P. 36 – 41. 
DOI: 10.1109/mcom.005.2200829</mixed-citation></citation-alternatives></ref><ref id="cit17"><label>17</label><citation-alternatives><mixed-citation xml:lang="ru">Ma S., Zhang C., Qi H., et al. A theory for semantic channel coding with many-to-one source / IEEE Trans. Cognitive Comm. Networking. 2025. DOI: 10.1109/tccn.2025.3544275</mixed-citation><mixed-citation xml:lang="en">Ma S., Zhang C., Qi H., et al. A theory for semantic channel coding with many-to-one source / IEEE Trans. Cognitive Comm. Networking. 2025. DOI: 10.1109/tccn.2025.3544275</mixed-citation></citation-alternatives></ref><ref id="cit18"><label>18</label><citation-alternatives><mixed-citation xml:lang="ru">Yang W., Du H., Liew Z. Q., et al. Semantic communications for future internet: fundamentals, applications, and challenges / IEEE Comm. Surveys Tutorials. 2023. Vol. 25. No. 1. P. 213 – 250. DOI: 10.1109/comst.2022.3223224</mixed-citation><mixed-citation xml:lang="en">Yang W., Du H., Liew Z. Q., et al. Semantic communications for future internet: fundamentals, applications, and challenges / IEEE Comm. Surveys Tutorials. 2023. Vol. 25. No. 1. P. 213 – 250. DOI: 10.1109/comst.2022.3223224</mixed-citation></citation-alternatives></ref><ref id="cit19"><label>19</label><citation-alternatives><mixed-citation xml:lang="ru">Cea I., Signorelli C. M. How to be an integrated information theorist without losing your body / Frontiers Comput. Neurosci. 2025. Vol. 18. 1510066. DOI: 10.3389/fncom.2024.1510066</mixed-citation><mixed-citation xml:lang="en">Cea I., Signorelli C. M. How to be an integrated information theorist without losing your body / Frontiers Comput. Neurosci. 2025. Vol. 18. 1510066. DOI: 10.3389/fncom.2024.1510066</mixed-citation></citation-alternatives></ref><ref id="cit20"><label>20</label><citation-alternatives><mixed-citation xml:lang="ru">Varley T. F. Information theory for complex systems scientists: what, why, and how / Phys. Rep. 2026. Vol. 1102. P. 1 – 55. 
DOI: 10.1016/j.physrep.2025.11.002</mixed-citation><mixed-citation xml:lang="en">Varley T. F. Information theory for complex systems scientists: what, why, and how / Phys. Rep. 2026. Vol. 1102. P. 1 – 55. DOI: 10.1016/j.physrep.2025.11.002</mixed-citation></citation-alternatives></ref><ref id="cit21"><label>21</label><citation-alternatives><mixed-citation xml:lang="ru">Mann S. F. The relevance of communication theory for theories of representation / Philos. Mind Sci. 2023. Vol. 4. 32. DOI: 10.33735/phimisci.2023.10992</mixed-citation><mixed-citation xml:lang="en">Mann S. F. The relevance of communication theory for theories of representation / Philos. Mind Sci. 2023. Vol. 4. 32. DOI: 10.33735/phimisci.2023.10992</mixed-citation></citation-alternatives></ref></ref-list><fn-group><fn fn-type="conflict"><p>The authors declare that there are no conflicts of interest present.</p></fn></fn-group></back></article>
