
Instance: Toda_la_bibliografia

Own Slots
  Slot Name Value
bibitems @book{Kennedy2001, author = {Kennedy, James and Eberhart, Russell C.}, title = {Swarm intelligence}, year = {2001}, isbn = {1-55860-595-9}, publisher = {Morgan Kaufmann Publishers Inc.}, address = {San Francisco, CA, USA}, }, @book{yang2011nature, title={Nature-Inspired Metaheuristic Algorithms: Second Edition}, author={Yang, X.S.}, isbn={9781905986286}, url={http://books.google.es/books?id=iVB\_ETlh4ogC}, year={2011}, publisher={Luniver Press} }, @article{Omran2009, title = "Bare bones differential evolution", journal = "European Journal of Operational Research", volume = "196", number = "1", pages = "128 - 139", year = "2009", note = "", issn = "0377-2217", doi = "10.1016/j.ejor.2008.02.035", url = "http://www.sciencedirect.com/science/article/pii/S0377221708002440", author = "Mahamed G.H. Omran and Andries P. Engelbrecht and Ayed Salman", keywords = "Evolutionary computation", keywords = "Differential evolution", keywords = "Particle swarm optimization", keywords = "Optimization", abstract = "The barebones differential evolution (BBDE) is a new, almost parameter-free optimization algorithm that is a hybrid of the barebones particle swarm optimizer and differential evolution. Differential evolution is used to mutate, for each particle, the attractor associated with that particle, defined as a weighted average of its personal and neighborhood best positions. The performance of the proposed approach is investigated and compared with differential evolution, a Von Neumann particle swarm optimizer and a barebones particle swarm optimizer. The experiments conducted show that the BBDE provides excellent results with the added advantage of little, almost no parameter tuning. Moreover, the performance of the barebones differential evolution using the ring and Von Neumann neighborhood topologies is investigated. Finally, the application of the BBDE to the real-world problem of unsupervised image classification is investigated. Experimental results show that the proposed approach performs very well compared to other state-of-the-art clustering algorithms in all measured criteria." }, @ARTICLE{Dorigo2006, author={Dorigo, M. and Birattari, M. and Stutzle, T.}, journal={Computational Intelligence Magazine, IEEE}, title={Ant colony optimization}, year={2006}, volume={1}, number={4}, pages={28-39}, abstract={Swarm intelligence is a relatively new approach to problem solving that takes inspiration from the social behaviors of insects and of other animals. In particular, ants have inspired a number of methods and techniques among which the most studied and the most successful is the general purpose optimization technique known as ant colony optimization. Ant colony optimization (ACO) takes inspiration from the foraging behavior of some ant species. These ants deposit pheromone on the ground in order to mark some favorable path that should be followed by other members of the colony. Ant colony optimization exploits a similar mechanism for solving optimization problems. From the early nineties, when the first ant colony optimization algorithm was proposed, ACO attracted the attention of increasing numbers of researchers and many successful applications are now available. Moreover, a substantial corpus of theoretical results is becoming available that provides useful guidelines to researchers and practitioners in further applications of ACO. 
The goal of this article is to introduce ant colony optimization and to survey its most notable applications}, keywords={artificial life;particle swarm optimisation;ant colony optimization;ant species;artificial ants;computational intelligence;foraging behavior;insect social behaviors;swarm intelligence;Animals;Ant colony optimization;Bridges;Competitive intelligence;Computational and artificial intelligence;Computational intelligence;Fluctuations;Guidelines;Insects;Problem-solving}, doi={10.1109/MCI.2006.329691}, ISSN={1556-603X},}, @article{dorigo1991positive, title={Positive feedback as a search strategy}, author={Dorigo, Marco and Maniezzo, Vittorio and Colorni, Alberto}, year={1991} }, @article{ozcan1998analysis, title={Analysis of a simple particle swarm optimization system}, author={Ozcan, Ender and Mohan, Chilukuri K}, journal={Intelligent Engineering Systems Through Artificial Neural Networks}, volume={8}, pages={253--258}, year={1998} }, @article{yang2011accelerated, title={Accelerated particle swarm optimization and support vector machine for business optimization and applications}, author={Yang, Xin-She and Deb, Suash and Fong, Simon}, journal={Networked Digital Technologies}, pages={53--66}, year={2011}, publisher={Springer} }, @ARTICLE{Clerc2002, author={Clerc, M. and Kennedy, J.}, journal={Evolutionary Computation, IEEE Transactions on}, title={The particle swarm - explosion, stability, and convergence in a multidimensional complex space}, year={2002}, volume={6}, number={1}, pages={58-73}, abstract={The particle swarm is an algorithm for finding optimal regions of complex search spaces through the interaction of individuals in a population of particles. This paper analyzes a particle's trajectory as it moves in discrete time (the algebraic view), then progresses to the view of it in continuous time (the analytical view). A five-dimensional depiction is developed, which describes the system completely. These analyses lead to a generalized model of the algorithm, containing a set of coefficients to control the system's convergence tendencies.
Some results of the particle swarm optimizer, implementing modifications derived from the analysis, suggest methods for altering the original algorithm in ways that eliminate problems and increase the ability of the particle swarm to find optima of some well-studied test functions}, keywords={convergence of numerical methods;genetic algorithms;numerical stability;search problems;convergence;evolutionary computation;multidimensional complex space;optimization;particle swarm;particle trajectory;search spaces;stability;Algorithm design and analysis;Control system synthesis;Convergence;Evolutionary computation;Multidimensional systems;Optimization methods;Particle swarm optimization;Stability;Stochastic systems;Testing}, doi={10.1109/4235.985692}, ISSN={1089-778X},}, @article{Trelea2003, title = "The particle swarm optimization algorithm: convergence analysis and parameter selection", journal = "Information Processing Letters", volume = "85", number = "6", pages = "317 - 325", year = "2003", note = "", issn = "0020-0190", doi = "10.1016/S0020-0190(02)00447-7", url = "http://www.sciencedirect.com/science/article/pii/S0020019002004477", author = "Ioan Cristian Trelea", keywords = "Particle swarm optimization", keywords = "Stochastic optimization", keywords = "Analysis of algorithms", keywords = "Parallel algorithms", abstract = "The particle swarm optimization algorithm is analyzed using standard results from the dynamic system theory. Graphical parameter selection guidelines are derived. The exploration-exploitation tradeoff is discussed and illustrated. Examples of performance on benchmark functions superior to previously published results are given." }, @ARTICLE{Goss1989, author={Goss, S. and Aron, S. and Deneubourg, J.L. and Pasteels, J.M.}, title={Self-organized shortcuts in the Argentine ant}, journal={Naturwissenschaften}, year={1989}, volume={76}, number={12}, pages={579-581}, note={cited By (since 1996) 257}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-0024827650&partnerID=40&md5=343e32f88f6b5dc7f2bd8c3af138b341}, document_type={Article}, source={Scopus}, }, @article{Niknam2010, title = "An efficient hybrid approach based on PSO, ACO and k-means for cluster analysis", journal = "Applied Soft Computing", volume = "10", number = "1", pages = "183 - 197", year = "2010", note = "", issn = "1568-4946", doi = "10.1016/j.asoc.2009.07.001", url = "http://www.sciencedirect.com/science/article/pii/S1568494609000854", author = "Taher Niknam and Babak Amiri", keywords = "Ant colony optimization (ACO)", keywords = "Data clustering", keywords = "Hybrid evolutionary optimization algorithm", keywords = "k-means clustering", keywords = "Fuzzy adaptive particle swarm optimization (FAPSO)", abstract = "Clustering is a popular data analysis and data mining technique. A popular technique for clustering is based on k-means such that the data is partitioned into K clusters. However, the k-means algorithm highly depends on the initial state and converges to local optimum solution. This paper presents a new hybrid evolutionary algorithm to solve nonlinear partitional clustering problem. The proposed hybrid evolutionary algorithm is the combination of FAPSO (fuzzy adaptive particle swarm optimization), ACO (ant colony optimization) and k-means algorithms, called FAPSO-ACO-K, which can find better cluster partition. The performance of the proposed algorithm is evaluated through several benchmark data sets. 
The simulation results show that the performance of the proposed algorithm is better than other algorithms such as PSO, ACO, simulated annealing (SA), combination of PSO and SA (PSO-SA), combination of ACO and SA (ACO-SA), combination of PSO and ACO (PSO-ACO), genetic algorithm (GA), Tabu search (TS), honey bee mating optimization (HBMO) and k-means for partitional clustering problem." }, @ARTICLE{Xinchao2010, author={Xinchao, Z.}, title={A perturbed particle swarm algorithm for numerical optimization}, journal={Applied Soft Computing Journal}, year={2010}, volume={10}, number={1}, pages={119-124}, note={cited By (since 1996) 47}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-70350103089&partnerID=40&md5=df4fe5c85b3a3ae2191348ce78f84b31}, affiliation={School of Science, Beijing University of Posts and Telecommunications, Beijing, 100876, China}, abstract={The canonical particle swarm optimization (PSO) has its own disadvantages, such as the high speed of convergence which often implies a rapid loss of diversity during the optimization process, which inevitably leads to undesirable premature convergence. In order to overcome the disadvantage of PSO, a perturbed particle swarm algorithm (pPSA) is presented based on the new particle updating strategy which is based upon the concept of perturbed global best to deal with the problem of premature convergence and diversity maintenance within the swarm. A linear model and a random model together with the initial max-min model are provided to understand and analyze the uncertainty of perturbed particle updating strategy. pPSA is validated using 12 standard test functions. The preliminary results indicate that pPSO performs much better than PSO both in quality of solutions and robustness and comparable with GCPSO. The experiments confirm us that the perturbed particle updating strategy is an encouraging strategy for stochastic heuristic algorithms and the max-min model is a promising model on the concept of possibility measure. © 2009 Elsevier B.V. All rights reserved.}, author_keywords={Numerical optimization; Particle swarm optimization; Particle updating strategy; Perturbed PSA}, document_type={Article}, source={Scopus}, }, @ARTICLE{Zhan2009, author={Zhi-Hui Zhan and Jun Zhang and Yun Li and Chung, H.S.-H.}, journal={Systems, Man, and Cybernetics, Part B: Cybernetics, IEEE Transactions on}, title={Adaptive Particle Swarm Optimization}, year={2009}, volume={39}, number={6}, pages={1362-1381}, abstract={An adaptive particle swarm optimization (APSO) that features better search efficiency than classical particle swarm optimization (PSO) is presented. More importantly, it can perform a global search over the entire search space with faster convergence speed. The APSO consists of two main steps. First, by evaluating the population distribution and particle fitness, a real-time evolutionary state estimation procedure is performed to identify one of the following four defined evolutionary states, including exploration, exploitation, convergence, and jumping out in each generation. It enables the automatic control of inertia weight, acceleration coefficients, and other algorithmic parameters at run time to improve the search efficiency and convergence speed. Then, an elitist learning strategy is performed when the evolutionary state is classified as convergence state. The strategy will act on the globally best particle to jump out of the likely local optima. 
The APSO has comprehensively been evaluated on 12 unimodal and multimodal benchmark functions. The effects of parameter adaptation and elitist learning will be studied. Results show that APSO substantially enhances the performance of the PSO paradigm in terms of convergence speed, global optimality, solution accuracy, and algorithm reliability. As APSO introduces two new parameters to the PSO paradigm only, it does not introduce an additional design or implementation complexity.}, keywords={particle swarm optimisation;state estimation;adaptive particle swarm optimization;algorithm reliability;convergence speed;convergence state;elitist learning strategy;global optimality;inertia weight automatic control;particle fitness evaluation;population distribution evaluation;real-time evolutionary state estimation procedure;solution accuracy;Adaptive particle swarm optimization (APSO);evolutionary computation;global optimization;particle swarm optimization (PSO)}, doi={10.1109/TSMCB.2009.2015956}, ISSN={1083-4419},}, @article{Bratton2008, author = {Bratton, Dan and Blackwell, Tim}, title = {A simplified recombinant PSO}, journal = {J. Artif. Evol. App.}, issue_date = {January 2008}, volume = {2008}, month = jan, year = {2008}, issn = {1687-6229}, pages = {14:1--14:10}, articleno = {14}, numpages = {10}, url = {http://dx.doi.org/10.1155/2008/654184}, doi = {10.1155/2008/654184}, acmid = {1384943}, publisher = {Hindawi Publishing Corp.}, address = {New York, NY, United States}, }, @article{Pedersen2010, title = "Simplifying Particle Swarm Optimization", journal = "Applied Soft Computing", volume = "10", number = "2", pages = "618 - 628", year = "2010", note = "", issn = "1568-4946", doi = "10.1016/j.asoc.2009.08.029", url = "http://www.sciencedirect.com/science/article/pii/S1568494609001549", author = "M.E.H. Pedersen and A.J. Chipperfield", keywords = "Numerical optimization", keywords = "Stochastic", keywords = "Swarm", keywords = "Tuning", keywords = "Simplifying", abstract = "The general purpose optimization method known as Particle Swarm Optimization (PSO) has received much attention in past years, with many attempts to find the variant that performs best on a wide variety of optimization problems. The focus of past research has been with making the PSO method more complex, as this is frequently believed to increase its adaptability to other optimization problems. This study takes the opposite approach and simplifies the PSO method. To compare the efficacy of the original PSO and the simplified variant here, an easy technique is presented for efficiently tuning their behavioural parameters. The technique works by employing an overlaid meta-optimizer, which is capable of simultaneously tuning parameters with regard to multiple optimization problems, whereas previous approaches to meta-optimization have tuned behavioural parameters to work well on just a single optimization problem. It is then found that not only the PSO method and its simplified variant have comparable performance for optimizing a number of Artificial Neural Network problems, but also the simplified variant appears to offer a small improvement in some cases." 
}, @article{clerc2004discrete, title={Discrete particle swarm optimization illustrated by the traveling salesman problem}, author={Clerc, Maurice}, journal={New optimization techniques in engineering}, volume={141}, pages={219--239}, year={2004}, publisher={Springer Heidelberg, Germany} }, @article{Nickabadi2011, title = "A novel particle swarm optimization algorithm with adaptive inertia weight", journal = "Applied Soft Computing", volume = "11", number = "4", pages = "3658 - 3670", year = "2011", note = "", issn = "1568-4946", doi = "10.1016/j.asoc.2011.01.037", url = "http://www.sciencedirect.com/science/article/pii/S156849461100055X", author = "Ahmad Nickabadi and Mohammad Mehdi Ebadzadeh and Reza Safabakhsh", keywords = "Particle swarm optimization", keywords = "Inertia weight", keywords = "Adaptation", keywords = "Success rate", abstract = "Particle swarm optimization (PSO) is a stochastic population-based algorithm motivated by intelligent collective behavior of some animals. The most important advantages of the PSO are that PSO is easy to implement and there are few parameters to adjust. The inertia weight (w) is one of PSO's parameters originally proposed by Shi and Eberhart to bring about a balance between the exploration and exploitation characteristics of PSO. Since the introduction of this parameter, there have been a number of proposals of different strategies for determining the value of inertia weight during a course of run. This paper presents the first comprehensive review of the various inertia weight strategies reported in the related literature. These approaches are classified and discussed in three main groups: constant, time-varying and adaptive inertia weights. A new adaptive inertia weight approach is also proposed which uses the success rate of the swarm as its feedback parameter to ascertain the particles' situation in the search space. The empirical studies on fifteen static test problems, a dynamic function and a real world engineering problem show that the proposed particle swarm optimization model is quite effective in adapting the value of w in the dynamic and static environments." }, @ARTICLE{Ratnaweera2004, author={Ratnaweera, A. and Halgamuge, S. and Watson, H.C.}, journal={Evolutionary Computation, IEEE Transactions on}, title={Self-organizing hierarchical particle swarm optimizer with time-varying acceleration coefficients}, year={2004}, volume={8}, number={3}, pages={240-255}, abstract={This paper introduces a novel parameter automation strategy for the particle swarm algorithm and two further extensions to improve its performance after a predefined number of generations. Initially, to efficiently control the local search and convergence to the global optimum solution, time-varying acceleration coefficients (TVAC) are introduced in addition to the time-varying inertia weight factor in particle swarm optimization (PSO). From the basis of TVAC, two new strategies are discussed to improve the performance of the PSO. First, the concept of "mutation" is introduced to the particle swarm optimization along with TVAC (MPSO-TVAC), by adding a small perturbation to a randomly selected modulus of the velocity vector of a random particle by predefined probability. Second, we introduce a novel particle swarm concept "self-organizing hierarchical particle swarm optimizer with TVAC (HPSO-TVAC)". 
Under this method, only the "social" part and the "cognitive" part of the particle swarm strategy are considered to estimate the new velocity of each particle and particles are reinitialized whenever they are stagnated in the search space. In addition, to overcome the difficulties of selecting an appropriate mutation step size for different problems, a time-varying mutation step size was introduced. Further, for most of the benchmarks, mutation probability is found to be insensitive to the performance of MPSO-TVAC method. On the other hand, the effect of reinitialization velocity on the performance of HPSO-TVAC method is also observed. Time-varying reinitialization step size is found to be an efficient parameter optimization strategy for HPSO-TVAC method. The HPSO-TVAC strategy outperformed all the methods considered in this investigation for most of the functions. Furthermore, it has also been observed that both the MPSO and HPSO strategies perform poorly when the acceleration coefficients are fixed at two.}, keywords={optimisation;parameter estimation;internal combustion spark ignition engines;parameter automation strategy;parameter estimation;self-organizing hierarchical particle swarm optimizer;time-varying acceleration coefficients;time-varying inertia weight factor;Acceleration;Automatic control;Automation;Genetic mutations;Manufacturing;Optimization methods;Particle swarm optimization;Personnel;Power engineering and energy;Scholarships;Acceleration coefficients;hierarchical particle swarm;mutation;particle swarm;reinitialization}, doi={10.1109/TEVC.2004.826071}, ISSN={1089-778X},}, @article{sedighizadeh2009particle, title={Particle swarm optimization methods, taxonomy and applications}, author={Sedighizadeh, Davoud and Masehian, Ellips}, journal={International Journal of Computer Theory and Engineering}, volume={1}, number={5}, pages={1793--8201}, year={2009} }, @ARTICLE{Qin2006, author={Qin, Z. and Yu, F. and Shi, Z. and Wang, Y.}, title={Adaptive inertia weight particle swarm optimization}, journal={Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, year={2006}, volume={4029 LNAI}, pages={450-459}, note={cited By (since 1996) 11}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-33746239398&partnerID=40&md5=735e5e95ea1f009867a929942dfdba3e}, affiliation={Department of Computer Science and Technology, Xian JiaoTong University, Xian 710049, China; Department of Computer Science and Technology, Tsinghua University, Beijing 100084, China}, abstract={Adaptive inertia weight is proposed to rationally balance the global exploration and local exploitation abilities for particle swarm optimization. The resulting algorithm is called adaptive inertia weight particle swarm optimization algorithm (AIW-PSO) where a simple and effective measure, individual search ability (ISA), is defined to indicate whether each particle lacks global exploration or local exploitation abilities in each dimension. A transform function is employed to dynamically calculate the values of inertia weight according to ISA. In each iteration during the run, every particle can choose appropriate inertia weight along every dimension of search space according to its own situation. By this fine strategy of dynamically adjusting inertia weight, the performance of PSO algorithm could be improved. 
In order to demonstrate the effectiveness of AIW-PSO, comprehensive experiments were conducted on three well-known benchmark functions with 10, 20, and 30 dimensions. AIW-PSO was compared with linearly decreasing inertia weight PSO, fuzzy adaptive inertia weight PSO and random number inertia weight PSO. Experimental results show that AIW-PSO achieves good performance and outperforms other algorithms. © Springer-Verlag Berlin Heidelberg 2006.}, document_type={Conference Paper}, source={Scopus}, }, @article{shelokar2007particle, title={Particle swarm and ant colony algorithms hybridized for improved continuous optimization}, author={Shelokar, PS and Siarry, Patrick and Jayaraman, VK and Kulkarni, BD}, journal={Applied mathematics and computation}, volume={188}, number={1}, pages={129--142}, year={2007}, publisher={Elsevier} }, @ARTICLE{Parsopoulos2004, author={Parsopoulos, K.E. and Vrahatis, M.N.}, journal={Evolutionary Computation, IEEE Transactions on}, title={On the computation of all global minimizers through particle swarm optimization}, year={2004}, volume={8}, number={3}, pages={211-224}, abstract={This paper presents approaches for effectively computing all global minimizers of an objective function. The approaches include transformations of the objective function through the recently proposed deflection and stretching techniques, as well as a repulsion source at each detected minimizer. The aforementioned techniques are incorporated in the context of the particle swarm optimization (PSO) method, resulting in an efficient algorithm which has the ability to avoid previously detected solutions and, thus, detect all global minimizers of a function. Experimental results on benchmark problems originating from the fields of global optimization, dynamical systems, and game theory, are reported, and conclusions are derived.}, keywords={computational complexity;evolutionary computation;game theory;minimisation;time-varying systems;deflection technique;dynamical systems;game theory;global minimizer computation;global optimization;objective function;particle swarm optimization;stretching technique;Computational complexity;Game theory;Genetic programming;Helium;Least squares methods;Mathematics;Optimization methods;Orbits;Particle swarm optimization;Stochastic processes;Deflection technique;Nash equilibria;PSO;detecting all minimizers;dynamical systems;particle swarm optimization;periodic orbits;stretching technique}, doi={10.1109/TEVC.2004.826076}, ISSN={1089-778X},}, @article{Brits2007, title = "Locating multiple optima using particle swarm optimization", journal = "Applied Mathematics and Computation", volume = "189", number = "2", pages = "1859 - 1883", year = "2007", note = "", issn = "0096-3003", doi = "10.1016/j.amc.2006.12.066", url = "http://www.sciencedirect.com/science/article/pii/S0096300306017826", author = "R. Brits and A.P. Engelbrecht and F. van den Bergh", keywords = "Particle swarm optimization", keywords = "Niching", keywords = "Speciation", abstract = "Many scientific and engineering applications require optimization methods to find more than one solution to multi-modal optimization problems. This paper presents a new particle swarm optimization (PSO) technique to locate and refine multiple solutions to such problems. The technique, NichePSO, extends the inherent unimodal nature of the standard PSO approach by growing multiple swarms from an initial particle population. Each subswarm represents a different solution or niche; optimized individually. 
The outcome of the NichePSO algorithm is a set of particle swarms, each representing a unique solution. Experimental results are provided to show that NichePSO can successfully locate all optima on a small set of test functions. These results are compared with another PSO niching algorithm, lbest PSO, and two genetic algorithm niching approaches. The influence of control parameters is investigated, including the relationship between the swarm size and the number of solutions (niches). An initial scalability study is also done." }, @ARTICLE{Parrott2006, author={Parrott, D. and Xiaodong Li}, journal={Evolutionary Computation, IEEE Transactions on}, title={Locating and tracking multiple dynamic optima by a particle swarm model using speciation}, year={2006}, volume={10}, number={4}, pages={440-458}, abstract={This paper proposes an improved particle swarm optimizer using the notion of species to determine its neighborhood best values for solving multimodal optimization problems and for tracking multiple optima in a dynamic environment. In the proposed species-based particle swam optimization (SPSO), the swarm population is divided into species subpopulations based on their similarity. Each species is grouped around a dominating particle called the species seed. At each iteration step, species seeds are identified from the entire population, and then adopted as neighborhood bests for these individual species groups separately. Species are formed adaptively at each step based on the feedback obtained from the multimodal fitness landscape. Over successive iterations, species are able to simultaneously optimize toward multiple optima, regardless of whether they are global or local optima. Our experiments on using the SPSO to locate multiple optima in a static environment and a dynamic SPSO (DSPSO) to track multiple changing optima in a dynamic environment have demonstrated that SPSO is very effective in dealing with multimodal optimization functions in both environments}, keywords={particle swarm optimisation;multimodal optimization problems;multiple dynamic optima;speciation;species-based particle swam optimization;Australia;Computer science;Feedback;Information technology;Particle swarm optimization;Particle tracking;Shape;Multimodal optimization;optimization in dynamic environments;particle swam optimization (PSO);tracking optima in dynamic environments}, doi={10.1109/TEVC.2005.859468}, ISSN={1089-778X},}, @article{vandenBergh2006, title = "A study of particle swarm optimization particle trajectories", journal = "Information Sciences", volume = "176", number = "8", pages = "937 - 971", year = "2006", note = "", issn = "0020-0255", doi = "10.1016/j.ins.2005.02.003", url = "http://www.sciencedirect.com/science/article/pii/S0020025505000630", author = "F. van den Bergh and A.P. Engelbrecht", keywords = "Particle swarm optimization", keywords = "Particle trajectories", keywords = "Equilibrium", keywords = "Convergence", abstract = "Particle swarm optimization (PSO) has shown to be an efficient, robust and simple optimization algorithm. Most of the PSO studies are empirical, with only a few theoretical analyses that concentrate on understanding particle trajectories. These theoretical studies concentrate mainly on simplified PSO systems. This paper overviews current theoretical studies, and extend these studies to investigate particle trajectories for general swarms to include the influence of the inertia term. The paper also provides a formal proof that each particle converges to a stable point. 
An empirical analysis of multi-dimensional stochastic particles is also presented. Experimental results are provided to support the conclusions drawn from the theoretical findings." }, @article{Blum2005, title = "Ant colony optimization: Introduction and recent trends", journal = "Physics of Life Reviews", volume = "2", number = "4", pages = "353 - 373", year = "2005", note = "", issn = "1571-0645", doi = "10.1016/j.plrev.2005.10.001", url = "http://www.sciencedirect.com/science/article/pii/S1571064505000333", author = "Christian Blum", keywords = "Ant colony optimization", keywords = "Discrete optimization", keywords = "Hybridization", abstract = "Ant colony optimization is a technique for optimization that was introduced in the early 1990's. The inspiring source of ant colony optimization is the foraging behavior of real ant colonies. This behavior is exploited in artificial ant colonies for the search of approximate solutions to discrete optimization problems, to continuous optimization problems, and to important problems in telecommunications, such as routing and load balancing. First, we deal with the biological inspiration of ant colony optimization algorithms. We show how this biological inspiration can be transfered into an algorithm for discrete optimization. Then, we outline ant colony optimization in more general terms in the context of discrete optimization, and present some of the nowadays best-performing ant colony optimization variants. After summarizing some important theoretical results, we demonstrate how ant colony optimization can be applied to continuous optimization problems. Finally, we provide examples of an interesting recent research direction: The hybridization with more classical techniques from artificial intelligence and operations research." }, @article{dorigo2006introduction, title={An Introduction to Ant Colony Optimization}, author={Dorigo, Marco and Socha, Krzysztof}, journal={IRIDIA Technical Report Series}, year={2006} }, @article{roy2011novel, title={A Novel Particle Swarm Optimization Algorithm for Multi-Objective Combinatorial Optimization Problem}, author={Roy, Rahul and Dehuri, Satchidananda and Cho, Sung Bae}, journal={International Journal of Applied Metaheuristic Computing (IJAMC)}, volume={2}, number={4}, pages={41--57}, year={2011}, publisher={IGI Global} }, @CONFERENCE{Li2006, author = {Li, Xiaodong}, title = {Particle swarm optimization: an introduction and its recent developments}, booktitle = {School of Computer Science and IT (2006), RMIT University}, year = {2006}, series = {SEAL'06}, keywords = {evolutionary computation, particle swarm optimization, swarm intelligence}, location = {Melbourne, Australia} }, @CONFERENCE{Li2007, author = {Li, Xiaodong and Engelbrecht, Andries P.}, title = {Particle swarm optimization: an introduction and its recent developments}, booktitle = {Proceedings of the 2007 GECCO conference companion on Genetic and evolutionary computation}, year = {2007}, series = {GECCO '07}, pages = {3391--3414}, address = {New York, NY, USA}, publisher = {ACM}, acmid = {1274118}, doi = {10.1145/1274000.1274118}, isbn = {978-1-59593-698-1}, keywords = {evolutionary computation, particle swarm optimization, swarm intelligence}, location = {London, United Kingdom}, numpages = {24}, url = {http://doi.acm.org/10.1145/1274000.1274118} }, @INPROCEEDINGS{Kennedy1995, author={Kennedy, J. and Eberhart, R.}, booktitle={Neural Networks, 1995. 
Proceedings., IEEE International Conference on}, title={Particle swarm optimization}, year={1995}, volume={4}, pages={1942-1948 vol.4}, abstract={A concept for the optimization of nonlinear functions using particle swarm methodology is introduced. The evolution of several paradigms is outlined, and an implementation of one of the paradigms is discussed. Benchmark testing of the paradigm is described, and applications, including nonlinear function optimization and neural network training, are proposed. The relationships between particle swarm optimization and both artificial life and genetic algorithms are described}, keywords={artificial intelligence;genetic algorithms;neural nets;search problems;simulation;artificial life;evolution;genetic algorithms;multidimensional search;neural network;nonlinear functions;optimization;particle swarm;simulation;social metaphor;Artificial neural networks;Birds;Educational institutions;Genetic algorithms;Humans;Marine animals;Optimization methods;Particle swarm optimization;Performance evaluation;Testing}, doi={10.1109/ICNN.1995.488968},}, @INPROCEEDINGS{Eberhart1995, author={Eberhart, R. and Kennedy, J.}, booktitle={Micro Machine and Human Science, 1995. MHS '95., Proceedings of the Sixth International Symposium on}, title={A new optimizer using particle swarm theory}, year={1995}, pages={39-43}, abstract={The optimization of nonlinear functions using particle swarm methodology is described. Implementations of two paradigms are discussed and compared, including a recently developed locally oriented paradigm. Benchmark testing of both paradigms is described, and applications, including neural network training and robot task learning, are proposed. Relationships between particle swarm optimization and both artificial life and evolutionary computation are reviewed}, keywords={algorithm theory;feedforward neural nets;genetic algorithms;intelligent control;learning (artificial intelligence);multilayer perceptrons;optimisation;artificial life;benchmark testing;bird flocks;evolutionary computation;gbest;genetic algorithms;globally oriented concept;hyperspace;lbest;locally oriented paradigm;multilayer perceptron;neural network training;nonlinear functions;optimization;particle swarm theory;pbest;robot task learning;Acceleration;Artificial neural networks;Evolutionary computation;Genetic algorithms;Optimization methods;Particle swarm optimization;Particle tracking;Performance evaluation;Statistics;Testing}, doi={10.1109/MHS.1995.494215},}, @INPROCEEDINGS{Ozcan1999, author={Ozcan, E. and Mohan, C.K.}, booktitle={Evolutionary Computation, 1999. CEC 99. Proceedings of the 1999 Congress on}, title={Particle swarm optimization: surfing the waves}, year={1999}, volume={3}, pages={1939-1944 Vol. 3}, abstract={A new optimization method has been proposed by J. Kennedy and R.C. Eberhart (1997; 1995), called Particle Swarm Optimization (PSO). This approach combines social psychology principles and evolutionary computation. It has been applied successfully to nonlinear function optimization and neural network training. 
Preliminary formal analyses showed that a particle in a simple one-dimensional PSO system follows a path defined by a sinusoidal wave, randomly deciding on both its amplitude and frequency (Y. Shi and R. Eberhart, 1998). The paper takes the next step, generalizing to obtain closed form equations for trajectories of particles in a multi-dimensional search space}, keywords={evolutionary computation;psychology;random processes;search problems;closed form equations;evolutionary computation;formal analyses;multi-dimensional search space;neural network training;nonlinear function optimization;one-dimensional PSO system;optimization method;particle swarm optimization;particle trajectories;sinusoidal wave;social psychology principles;surfing;Computer science;Educational institutions;Equations;Frequency;Genetic algorithms;Marine animals;Optimization methods;Organisms;Particle swarm optimization;Psychology}, doi={10.1109/CEC.1999.785510},}, @INPROCEEDINGS{Yasuda2003, author={Yasuda, K. and Ide, A. and Iwasaki, N.}, booktitle={Systems, Man and Cybernetics, 2003. IEEE International Conference on}, title={Adaptive particle swarm optimization}, year={2003}, volume={2}, pages={1554-1559 vol.2}, abstract={The particle swarm optimization (PSO) method is one of the most powerful methods for solving unconstrained and constrained global optimization problems. Little is, however, known about how the PSO method works or finds a globally optimal solution of a global optimization problem when the method is applied to global optimization problems. This paper deals with the analysis of the dynamics of PSO in order to obtain an understanding about how it searches a globally optimal solution and a strategy about how to tune its parameters. While a generalized reduced model of PSO is proposed in order to analyze the dynamics of PSO, the stability analysis is carried out on the basis of both the eigenvalue analysis and some numerical simulations on a typical global optimization problem.}, keywords={eigenvalues and eigenfunctions;optimisation;stability;PSO;Particle Swarm Optimization;eigenvalue analysis;global optimization;numerical simulation;optimal solution;stability analysis;Algorithm design and analysis;Constraint optimization;Eigenvalues and eigenfunctions;Equations;Numerical simulation;Optimization methods;Particle swarm optimization;Power engineering and energy;Stability analysis}, doi={10.1109/ICSMC.2003.1244633}, ISSN={1062-922X},}, @INPROCEEDINGS{Zheng2003, author={Yong-ling Zheng and Long-Hua Ma and Li-yan Zhang and Ji-xin Qian}, booktitle={Machine Learning and Cybernetics, 2003 International Conference on}, title={On the convergence analysis and parameter selection in particle swarm optimization}, year={2003}, volume={3}, pages={1802-1807 Vol.3}, abstract={A PSO with increasing inertia weight, distinct from a widely used PSO with decreasing inertia weight, is proposed in this paper. Far from drawing conclusions from sole empirical study or rule of thumb, this algorithm is derived from particle trajectory study and convergence analysis. Four standard test functions are used to confirm its validity finally. 
From the experiments, it is clear that a PSO with increasing inertia weight outperforms the one with decreasing inertia weight, both in convergent speed and solution precision, with no additional computing load.}, keywords={convergence;optimisation;search problems;convergence analysis;inertia weight;parameter selection;particle swarm optimization;particle trajectory study;thumb rule;Algorithm design and analysis;Birds;Control systems;Convergence;Engineering drawings;Equations;Particle swarm optimization;Systems engineering and theory;Testing;Thumb}, doi={10.1109/ICMLC.2003.1259789},}, @INPROCEEDINGS{Kennedy1997, author={Kennedy, J. and Eberhart, R.C.}, booktitle={Systems, Man, and Cybernetics, 1997. Computational Cybernetics and Simulation., 1997 IEEE International Conference on}, title={A discrete binary version of the particle swarm algorithm}, year={1997}, volume={5}, pages={4104-4108 vol.5}, abstract={The particle swarm algorithm adjusts the trajectories of a population of "particles" through a problem space on the basis of information about each particle's previous best performance and the best previous performance of its neighbors. Previous versions of the particle swarm have operated in continuous space, where trajectories are defined as changes in position on some number of dimensions. The paper reports a reworking of the algorithm to operate on discrete binary variables. In the binary version, trajectories are changes in the probability that a coordinate will take on a zero or one value. Examples, applications, and issues are discussed}, keywords={optimisation;probability;problem solving;continuous space;discrete binary version;optimization;particle swarm algorithm;performance;population;position changes;probability;problem solving;trajectories;Hypercubes;Particle swarm optimization;Random number generation;Robustness;Routing;Space technology;State-space methods;Statistics;Stochastic processes}, doi={10.1109/ICSMC.1997.637339}, ISSN={1062-922X},}, @INPROCEEDINGS{Chongpeng2007, author={Huang Chongpeng and Zhang Yuling and Jiang Dingguo and Xu Baoguo}, booktitle={Control Conference, 2007. CCC 2007. Chinese}, title={On Some Non-linear Decreasing Inertia Weight Strategies in Particle Swarm Optimization}, year={2007}, pages={750-753}, abstract={Inspired by analyzing principle of PSO, some non-linear strategies for decreasing inertia weight (DIW) are proposed based on the existing linear DIW (LDIW), in this paper. Then a power function is designed to unify them. Four benchmark functions are used to evaluate these strategies on the PSO performance and select the best one. The experimental results show that for most continuous optimization problems, the best one gains an advantage over the linear strategy and others. 
It has more varieties of the swarm at the early stages so can escape from local minimum more easily, and also can speed up the convergence of particles at the later stages to improve the performance of PSO.}, keywords={continuous systems;nonlinear control systems;particle swarm optimisation;benchmark functions;continuous optimization problems;nonlinear decreasing inertia weight strategies;particle swarm optimization;Acceleration;Communication system control;Control engineering;Convergence;Equations;Evolutionary computation;Particle swarm optimization;Testing;Decreasing Strategy;Inertia Weight;Particle Swarm Optimization;Power Function}, doi={10.1109/CHICC.2006.4347175},}, @inproceedings{Rada-Vilela2011, author = {Rada-Vilela, Juan and Zhang, Mengjie and Seah, Winston}, title = {A performance study on synchronous and asynchronous updates in particle swarm optimization}, booktitle = {Proceedings of the 13th annual conference on Genetic and evolutionary computation}, series = {GECCO '11}, year = {2011}, isbn = {978-1-4503-0557-0}, location = {Dublin, Ireland}, pages = {21--28}, numpages = {8}, url = {http://doi.acm.org/10.1145/2001576.2001581}, doi = {10.1145/2001576.2001581}, acmid = {2001581}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {particle swarm optimization, speed of convergence, synchronous and asynchronous updates}, }, @INPROCEEDINGS{Shi1998, author={Yuhui Shi and Eberhart, R.}, booktitle={Evolutionary Computation Proceedings, 1998. IEEE World Congress on Computational Intelligence., The 1998 IEEE International Conference on}, title={A modified particle swarm optimizer}, year={1998}, pages={69-73}, abstract={Evolutionary computation techniques, genetic algorithms, evolutionary strategies and genetic programming are motivated by the evolution of nature. A population of individuals, which encode the problem solutions are manipulated according to the rule of survival of the fittest through "genetic" operations, such as mutation, crossover and reproduction. A best solution is evolved through the generations. In contrast to evolutionary computation techniques, Eberhart and Kennedy developed a different algorithm through simulating social behavior (R.C. Eberhart et al., 1996; R.C. Eberhart and J. Kennedy, 1996; J. Kennedy and R.C. Eberhart, 1995; J. Kennedy, 1997). As in other algorithms, a population of individuals exists. This algorithm is called particle swarm optimization (PSO) since it resembles a school of flying birds. In a particle swarm optimizer, instead of using genetic operators, these individuals are "evolved" by cooperation and competition among the individuals themselves through generations. Each particle adjusts its flying according to its own flying experience and its companions' flying experience. We introduce a new parameter, called inertia weight, into the original particle swarm optimizer. 
Simulations have been done to illustrate the significant and effective impact of this new parameter on the particle swarm optimizer}, keywords={genetic algorithms;iterative methods;search problems;competition;cooperation;evolutionary computation techniques;evolutionary strategies;flying birds;flying experience;genetic algorithms;genetic programming;inertia weight;modified particle swarm optimizer;particle swarm optimization;social behavior simulation;survival of the fittest;Birds;Collaboration;Computational modeling;Educational institutions;Evolutionary computation;Genetic algorithms;Genetic mutations;Genetic programming;Nonlinear equations;Particle swarm optimization}, doi={10.1109/ICEC.1998.699146},}, @INPROCEEDINGS{Kennedy2002, author={Kennedy, J. and Mendes, R.}, booktitle={Evolutionary Computation, 2002. CEC '02. Proceedings of the 2002 Congress on}, title={Population structure and particle swarm performance}, year={2002}, volume={2}, pages={1671-1676}, abstract={The effects of various population topologies on the particle swarm algorithm were systematically investigated. Random graphs were generated to specifications, and their performance on several criteria was compared. What makes a good population structure? We discovered that previous assumptions may not have been correct}, keywords={evolutionary computation;graph theory;optimisation;particle swarm algorithm;particle swarm performance;population structure;random graphs;Algorithm design and analysis;Lattices;Particle swarm optimization;Performance analysis;Performance evaluation;Region 7;Social network services;Statistics;Testing;Topology}, doi={10.1109/CEC.2002.1004493},}, @INPROCEEDINGS{Peram2003, author={Peram, T. and Veeramachaneni, K. and Mohan, C.K.}, booktitle={Swarm Intelligence Symposium, 2003. SIS '03. Proceedings of the 2003 IEEE}, title={Fitness-distance-ratio based particle swarm optimization}, year={2003}, pages={174-181}, keywords={convergence of numerical methods;evolutionary computation;optimisation;search problems;FDR-PSO;fitness-distance ratio;global optima search;multimodal optimization problems;particle position;particle swarm optimization;premature convergence;relative fitness;Animals;Application software;Cognition;Computer science;Convergence;Evolutionary computation;Particle swarm optimization;Performance analysis;Power engineering and energy;Problem-solving}, doi={10.1109/SIS.2003.1202264},}, @INPROCEEDINGS{Liang2005, author={Liang, J. J. and Suganthan, P.N.}, booktitle={Swarm Intelligence Symposium, 2005. SIS 2005. Proceedings 2005 IEEE}, title={Dynamic multi-swarm particle swarm optimizer}, year={2005}, pages={124-129}, abstract={In this paper, a novel dynamic multi-swarm particle swarm optimizer (PSO) is introduced. Different from the existing multi-swarm PSOs and the local version of PSO, the swarms are dynamic and the swarms' size is small. The whole population is divided into many small swarms, these swarms are regrouped frequently by using various regrouping schedules and information is exchanged among the swarms. Experiments are conducted on a set of shifted rotated benchmark functions and results show its better performance when compared with some recent PSO variants.}, keywords={particle swarm optimisation;benchmark functions;dynamic multiswarm particle swarm optimizer;swarm regrouping schedules;Acceleration;Birds;Equations;Particle swarm optimization}, doi={10.1109/SIS.2005.1501611},}, @INPROCEEDINGS{Angeline1998, author={Angeline, P.J.}, booktitle={Evolutionary Computation Proceedings, 1998. 
IEEE World Congress on Computational Intelligence., The 1998 IEEE International Conference on}, title={Using selection to improve particle swarm optimization}, year={1998}, pages={84-89}, abstract={This paper describes a evolutionary optimization algorithm that is a hybrid based on the particle swarm algorithm but with the addition of a standard selection mechanism from evolutionary computations. A comparison is performed between the hybrid swarm and the ordinary particle swarm that shows selection to provide an advantage for some (but not all) complex functions}, keywords={functions;genetic algorithms;complex functions;evolutionary computations;evolutionary optimization algorithm;hybrid swarm algorithm;particle swarm algorithm;selection mechanism;Acceleration;Equations;Evolutionary computation;Functional programming;Genetic mutations;Insects;Particle swarm optimization;Random variables;Search methods;World Wide Web}, doi={10.1109/ICEC.1998.699327},}, @INPROCEEDINGS{Liang2005-2, author={Liang, J. J. and Suganthan, P.N.}, booktitle={Evolutionary Computation, 2005. The 2005 IEEE Congress on}, title={Dynamic multi-swarm particle swarm optimizer with local search}, year={2005}, volume={1}, pages={522-528 Vol.1}, abstract={In this paper, the performance of a modified dynamic multi-swarm particle swarm optimizer (DMS-PSO) on the set of benchmark functions provided by CEC2005 is reported. Different from the existing multi-swarm PSOs and local versions of PSO, the swarms are dynamic and the swarms' size is small. The whole population is divided into many small swarms, these swarms are regrouped frequently by using various regrouping schedules and information is exchanged among the swarms. The quasi-Newton method is combined to improve its local search ability}, keywords={Newton method;particle swarm optimisation;search problems;dynamic multiswarm particle swarm optimizer;local search problem;quasiNewton method;Acceleration;Birds;Convergence;Diversity reception;Equations;Evolutionary computation;Optimization methods;Particle swarm optimization;Testing;Topology}, doi={10.1109/CEC.2005.1554727},}, @INPROCEEDINGS{Zhang2003, author={Wen-Jun Zhang and Xiao-Feng Xie}, booktitle={Systems, Man and Cybernetics, 2003. IEEE International Conference on}, title={DEPSO: hybrid particle swarm with differential evolution operator}, year={2003}, volume={4}, pages={3816-3821 vol.4}, abstract={A hybrid particle swarm with differential evolution operator, termed DEPSO, which provide the bell-shaped mutations with consensus on the population diversity along with the evolution, while keeping the self-organized particle swarm dynamics, is proposed. 
Then it is applied to a set of benchmark functions, and the experimental results illustrate its efficiency.}, keywords={evolutionary computation;mathematical operators;optimisation;bell-shaped mutations;differential evolution operator;hybrid particle swarm;particle swarm optimization;population diversity;self-organized particle swarm dynamics;Acceleration;Chromium;Differential equations;Gaussian distribution;Genetic mutations;Microelectronics;Particle swarm optimization;Topology}, doi={10.1109/ICSMC.2003.1244483}, ISSN={1062-922X},}, @INPROCEEDINGS{Xiao-Feng2002, author={Xiao-Feng Xie and Wen-Jun Zhang and Zhi-Lian Yang}, booktitle={Evolutionary Computation, 2002. CEC '02. Proceedings of the 2002 Congress on}, title={Dissipative particle swarm optimization}, year={2002}, volume={2}, pages={1456-1461}, abstract={A dissipative particle swarm optimization is developed according to the self-organization of dissipative structure. The negative entropy is introduced to construct an opening dissipative system that is far-from-equilibrium so as to driving the irreversible evolution process with better fitness. The testing of two multimodal functions indicates it improves the performance effectively}, keywords={artificial life;evolutionary computation;dissipative particle swarm optimization;dissipative structure;evolutionary computation;fitness;irreversible evolution process;multimodal functions;negative entropy;performance;self-organization;Biological system modeling;Entropy;Evolution (biology);Evolutionary computation;Fluctuations;Microelectronics;Particle swarm optimization;Performance analysis;Testing;Thermodynamics}, doi={10.1109/CEC.2002.1004457},}, @INPROCEEDINGS{Krink02thelifecycle, author = {Thiemo Krink and Morten Løvbjerg}, title = {The LifeCycle model: Combining Particle Swarm Optimisation, Genetic Algorithms and HillClimbers}, booktitle = {Proceedings of the 7th International Conference on Parallel Problem Solving from Nature}, year = {2002}, pages = {621--630} }, @INPROCEEDINGS{Lovbjerg2002, author={Lovbjerg, M. and Krink, T.}, booktitle={Evolutionary Computation, 2002. CEC '02. Proceedings of the 2002 Congress on}, title={Extending particle swarm optimisers with self-organized criticality}, year={2002}, volume={2}, pages={1588-1593}, abstract={Particle swarm optimisers (PSOs) show potential in function optimisation, but still have room for improvement. Self-organized criticality (SOC) can help control the PSO and add diversity. Extending the PSO with SOC seems promising reaching faster convergence and better solutions}, keywords={evolutionary computation;optimisation;convergence;function optimisation;particle swarm optimisers;self-organized criticality;Application software;Chaos;Computer science;Evolutionary computation;Genetic algorithms;Genetic mutations;Insects;Particle swarm optimization;Performance analysis;Weight control}, doi={10.1109/CEC.2002.1004479},}, @inproceedings{Carlisle2001, author = {Carlisle, A. 
and Dozier, G.}, booktitle = {{PSO} Workshop}, citeulike-article-id = {6592019}, title = {An Off-The-Shelf {PSO}}, year = {2001}, abstract = {What attributes and settings of the Particle Swarm Optimizer constants result in a good, off-the-shelf, PSO implementation? There are many parameters, both explicit and implicit, associated with the Particle Swarm Optimizer that may affect its performance. There are the social and cognitive learning rates and magnitudes, the population size, the neighborhood size (including global neighborhoods), synchronous or asynchronous updates, and various additional controls, such as inertia and constriction factors. For any given problem, the values and choices for some of these parameters may have significant impact on the efficiency and reliability of the PSO, and yet varying other parameters may have little or no effect. What set of values, then, constitutes a good, general purpose PSO? While some of these factors have been investigated in the literature, others have not. In this paper we use existing literature and a selection of benchmark problems to determine a set of starting values suitable for an "off-the-shelf" PSO.}, }, @CONFERENCE{Suresh2008, author={Suresh, K. and Ghosh, S. and Kundu, D. and Sen, A. and Das, S. and Abraham, A.}, title={Inertia-adaptive particle swarm optimizer for improved global search}, booktitle={Proceedings - 8th International Conference on Intelligent Systems Design and Applications, ISDA 2008}, year={2008}, volume={2}, pages={253-258}, art_number={4696340}, note={cited By (since 1996) 7}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-67449124383&partnerID=40&md5=8310e199951ac9d0931f7160d1c2c53f}, affiliation={Department of Electronics and Telecommunication Engineering, Jadavpur University, Kolkata, India; Center of Excellence for Quantifiable Quality of Service, Norwegian University of Science and Technology, Trondheim, Norway}, abstract={This paper describes a method for improving the final accuracy and the convergence speed of Particle Swarm Optimization (PSO) by adapting its inertia factor in the velocity updating equation and also by adding a new coefficient to the position updating equation. These modifications do not impose any serious requirements on the basic algorithm in terms of the number of Function Evaluations (FEs). The new algorithm has been shown to be statistically significantly better than four recent variants of PSO on an eight-function test-suite for the following performance matrices: Quality of the final solution, time to find out the solution, frequency of hitting the optima, and scalability. © 2008 IEEE.}, document_type={Conference Paper}, source={Scopus}, }, @INPROCEEDINGS{Eberhart2000, author={Eberhart, R.C. and Shi, Y.}, booktitle={Evolutionary Computation, 2000. Proceedings of the 2000 Congress on}, title={Comparing inertia weights and constriction factors in particle swarm optimization}, year={2000}, volume={1}, pages={84-88 vol.1}, abstract={The performance of particle swarm optimization using an inertia weight is compared with performance using a constriction factor. Five benchmark functions are used for the comparison. It is concluded that the best approach is to use the constriction factor while limiting the maximum velocity Vmax to the dynamic range of the variable Xmax on each dimension. 
This approach provides performance on the benchmark functions superior to any other published results known by the authors}, keywords={evolutionary computation;benchmark functions;constriction factors;inertia weights;particle swarm optimization;Acceleration;Computational modeling;Dynamic range;Evolutionary computation;Genetic algorithms;Nonlinear equations;Particle swarm optimization;Random number generation}, doi={10.1109/CEC.2000.870279},}, @phdthesis{Dor1992thesis, author = {Marco Dorigo}, title = {Optimization, Learning and Natural Algorithms (in {I}talian)}, school = {Dipartimento di Elettronica, Politecnico di Milano}, year = {1992}, address = {Milan, Italy}, keywords = {metaheuristics, ant colony optimization, robotics, swarm intelligence} }, @phdthesis{evers2009automatic, title={An automatic regrouping mechanism to deal with stagnation in particle swarm optimization}, author={Evers, George I}, year={2009}, school={University of Texas-Pan American} }, @phdthesis{pedersen2010tuning, title={Tuning \& simplifying heuristical optimization}, author={Pedersen, Magnus Erik Hvass}, year={2010}, school={University of Southampton} }, @phdthesis{VanDenBergh:2002, author = {Van Den Bergh, Frans}, title = {An analysis of particle swarm optimizers}, year = {2002}, note = {AAI0804353}, school = {University of Pretoria}, address = {Pretoria, South Africa}, }, @OTHER{ACO_scholarpedia:Online, url = {http://www.scholarpedia.org/article/Ant_colony_optimization} }, @OTHER{NetLogo_northwestern:Online, author = {Wilensky, U.}, location = {Evanston, IL}, note = {Center for Connected Learning and Computer-Based Modeling, Northwestern University}, url = {http://ccl.northwestern.edu/netlogo/}, year = {1999} }, @OTHER{PSO_scholarpedia:Online, url = {http://www.scholarpedia.org/article/Particle_swarm_optimization} }, @OTHER{css:Online, url = {http://librosweb.es/css/} }, @OTHER{cssAvanzado:Online, url = {http://librosweb.es/css_avanzado/} }, @OTHER{NetLogo_turtlezero:Online, url = {http://www.turtlezero.com/wiki/doku.php/} }, @OTHER{ParticleSawarm:Online, url = {http://www.particleswarm.info/Programs.html} }, @OTHER{Protege_Stanford:Online, url = {http://protege.stanford.edu/doc/users.html#tutorials} }, @OTHER{ProtegeOntology:Online, url = {http://protege.stanford.edu/publications/ontology_development/ontology101-noy-mcguinness.html} }, @OTHER{SwarmIntelligence:Online, url = {http://www.swarmintelligence.org/} }, @OTHER{syml:Online, url = {http://www.omgsysml.org/} }, @OTHER{sysml_Papyrus:Online, url = {http://www.eclipse.org/papyrus/usersTutorials/resources/TutorialOnPapyrusUSE_d20101001.pdf} }
code_algernon
  ;; In Project -> Configure, enable the Algernon tab and clear its window.
  ;; Copy and paste the contents of the code_algernon value into the window, then press Tell.
  ;; For each instance of REFERENCES, this asserts its latex-reference value
  ;; into the bibitems slot of the BIBTEX instance:
  (
    (:FOR 1 ?n ?references (:INSTANCE REFERENCES ?references))
    (:INSTANCE BIBTEX ?bibtex-inst)
    (latex-reference ?references ?latex-reference)
    (bibitems ?bibtex-inst ?latex-reference)
  )
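
The Algernon query above only fills the bibitems slot; what is stored is one long string of concatenated BibTeX entries, as shown in the Own Slots table. The following is a minimal Python sketch of how such a slot value could be split back into individual entries and written out as a .bib file for LaTeX. The file names bibitems.txt and refs.bib are hypothetical placeholders, and the splitting rule (each entry begins at an @Type{ marker) is an assumption based on the value above, not part of the Protégé export.

  import re

  def split_bibitems(slot_value):
      # Each BibTeX entry in the slot value starts with an '@Type{' marker
      # (e.g. @book{, @ARTICLE{, @INPROCEEDINGS{); split at those offsets.
      starts = [m.start() for m in re.finditer(r"@\w+\s*\{", slot_value)]
      entries = []
      for i, start in enumerate(starts):
          end = starts[i + 1] if i + 1 < len(starts) else len(slot_value)
          # Drop the trailing comma that separates entries inside the slot value.
          entries.append(slot_value[start:end].rstrip().rstrip(","))
      return entries

  if __name__ == "__main__":
      # 'bibitems.txt' is a hypothetical dump of the slot value shown above.
      with open("bibitems.txt", encoding="utf-8") as f:
          value = f.read()
      entries = split_bibitems(value)
      with open("refs.bib", "w", encoding="utf-8") as f:
          f.write("\n\n".join(entries))
      print("Wrote %d entries to refs.bib" % len(entries))

The resulting refs.bib can then be cited from LaTeX in the usual way, e.g. \cite{Kennedy2001} together with \bibliography{refs}.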



Generated: 07/04/2013, 1:02:37 AM, Central European Summer Time

Protégé is a trademark of Stanford University, Copyright (c) 1998-2011 Stanford University.