Improve the astropy skill

Timothy Kassis
2025-11-03 16:34:14 -08:00
parent d57129ca3f
commit 86d8878eeb
14 changed files with 2679 additions and 2347 deletions

View File

@@ -57,7 +57,7 @@
- **UMAP-learn** - Dimensionality reduction and manifold learning
## Materials Science & Chemistry
- **Astropy** - Astronomy and astrophysics (coordinates, cosmology, FITS files)
- **Astropy** - Comprehensive Python library for astronomy and astrophysics providing core functionality for astronomical research and data analysis. Includes coordinate system transformations (ICRS, Galactic, FK5, AltAz), physical units and quantities with automatic dimensional consistency, FITS file operations (reading, writing, manipulating headers and data), cosmological calculations (luminosity distance, lookback time, Hubble parameter, Planck/WMAP models), precise time handling across multiple time scales (UTC, TAI, TT, TDB) and formats (JD, MJD, ISO), table operations with unit support (FITS, CSV, HDF5, VOTable), WCS transformations between pixel and world coordinates, astronomical constants, modeling framework, visualization tools, and statistical functions. Use for celestial coordinate transformations, unit conversions, FITS image/table processing, cosmological distance calculations, barycentric time corrections, catalog cross-matching, and astronomical data analysis
- **COBRApy** - Constraint-based metabolic modeling and flux balance analysis
- **Pymatgen** - Materials structure analysis, phase diagrams, and electronic structure

File diff suppressed because it is too large

View File

@@ -1,618 +0,0 @@
# Common Astropy Workflows
This document describes frequently used workflows when working with astronomical data using astropy.
## 1. Working with FITS Files
### Basic FITS Reading
```python
from astropy.io import fits
import numpy as np
# Open and examine structure
with fits.open('observation.fits') as hdul:
    hdul.info()
    # Access primary HDU
    primary_hdr = hdul[0].header
    primary_data = hdul[0].data
    # Access extension
    ext_data = hdul[1].data
    ext_hdr = hdul[1].header
    # Read specific header keywords
    object_name = primary_hdr['OBJECT']
    exposure = primary_hdr['EXPTIME']
```
### Writing FITS Files
```python
# Create new FITS file
from astropy.io import fits
import numpy as np
# Create data
data = np.random.random((100, 100))
# Create primary HDU
hdu = fits.PrimaryHDU(data)
hdu.header['OBJECT'] = 'M31'
hdu.header['EXPTIME'] = 300.0
# Write to file
hdu.writeto('output.fits', overwrite=True)
# Multi-extension FITS (data1, data2, data3 are additional NumPy arrays)
hdul = fits.HDUList([
    fits.PrimaryHDU(data1),
    fits.ImageHDU(data2, name='SCI'),
    fits.ImageHDU(data3, name='ERR')
])
hdul.writeto('multi_ext.fits', overwrite=True)
```
### FITS Table Operations
```python
from astropy.io import fits
# Read binary table
with fits.open('catalog.fits') as hdul:
    table_data = hdul[1].data
    # Access columns
    ra = table_data['RA']
    dec = table_data['DEC']
    mag = table_data['MAG']
    # Filter data
    bright = table_data[table_data['MAG'] < 15]
# Write binary table
from astropy.table import Table
import astropy.units as u
t = Table([ra, dec, mag], names=['RA', 'DEC', 'MAG'])
t['RA'].unit = u.degree
t['DEC'].unit = u.degree
t.write('output_catalog.fits', format='fits', overwrite=True)
```
## 2. Coordinate Transformations
### Basic Coordinate Creation and Transformation
```python
from astropy.coordinates import SkyCoord
import astropy.units as u
# Create from RA/Dec
c = SkyCoord(ra=10.68458*u.degree, dec=41.26917*u.degree, frame='icrs')
# Alternative creation methods
c = SkyCoord('00:42:44.3 +41:16:09', unit=(u.hourangle, u.deg))
c = SkyCoord('00h42m44.3s +41d16m09s')
# Transform to different frames
c_gal = c.galactic
c_fk5 = c.fk5
print(f"Galactic: l={c_gal.l.deg}, b={c_gal.b.deg}")
```
### Coordinate Arrays and Separations
```python
import numpy as np
from astropy.coordinates import SkyCoord
import astropy.units as u
# Create array of coordinates
ra_array = np.array([10.1, 10.2, 10.3]) * u.degree
dec_array = np.array([40.1, 40.2, 40.3]) * u.degree
coords = SkyCoord(ra=ra_array, dec=dec_array, frame='icrs')
# Calculate separations
c1 = SkyCoord(ra=10*u.degree, dec=40*u.degree)
c2 = SkyCoord(ra=11*u.degree, dec=41*u.degree)
sep = c1.separation(c2)
print(f"Separation: {sep.to(u.arcmin)}")
# Position angle
pa = c1.position_angle(c2)
```
### Catalog Matching
```python
from astropy.coordinates import SkyCoord, match_coordinates_sky
import astropy.units as u
# Two catalogs of coordinates
catalog1 = SkyCoord(ra=[10, 11, 12]*u.degree, dec=[40, 41, 42]*u.degree)
catalog2 = SkyCoord(ra=[10.01, 11.02, 13]*u.degree, dec=[40.01, 41.01, 43]*u.degree)
# Find nearest neighbors
idx, sep2d, dist3d = catalog1.match_to_catalog_sky(catalog2)
# Filter by separation threshold
max_sep = 1 * u.arcsec
matched = sep2d < max_sep
matching_indices = idx[matched]
```
### Horizontal Coordinates (Alt/Az)
```python
from astropy.coordinates import SkyCoord, EarthLocation, AltAz
from astropy.time import Time
import astropy.units as u
# Observer location
location = EarthLocation(lat=40*u.deg, lon=-70*u.deg, height=300*u.m)
# Observation time
obstime = Time('2023-01-01 03:00:00')
# Target coordinate
target = SkyCoord(ra=10*u.degree, dec=40*u.degree, frame='icrs')
# Transform to Alt/Az
altaz_frame = AltAz(obstime=obstime, location=location)
target_altaz = target.transform_to(altaz_frame)
print(f"Altitude: {target_altaz.alt.deg}")
print(f"Azimuth: {target_altaz.az.deg}")
```
## 3. Units and Quantities
### Basic Unit Operations
```python
import astropy.units as u
# Create quantities
distance = 5.2 * u.parsec
time = 10 * u.year
velocity = 300 * u.km / u.s
# Unit conversion
distance_ly = distance.to(u.lightyear)
velocity_mps = velocity.to(u.m / u.s)
# Arithmetic with units
wavelength = 500 * u.nm
frequency = wavelength.to(u.Hz, equivalencies=u.spectral())
# Compose/decompose units
composite = (1 * u.kg * u.m**2 / u.s**2)
print(composite.decompose()) # Base SI units
print(composite.compose()) # Known compound units (Joule)
```
### Working with Arrays
```python
import numpy as np
import astropy.units as u
# Quantity arrays
wavelengths = np.array([400, 500, 600]) * u.nm
frequencies = wavelengths.to(u.THz, equivalencies=u.spectral())
# Mathematical operations preserve units
fluxes = np.array([1.2, 2.3, 1.8]) * u.Jy
luminosities = 4 * np.pi * (10*u.pc)**2 * fluxes
```
### Custom Units and Equivalencies
```python
import astropy.units as u
# Define custom unit
beam = u.def_unit('beam', 1.5e-10 * u.steradian)
# Register for session
u.add_enabled_units([beam])
# Use in calculations
flux_per_beam = 1.5 * u.Jy / beam
# Doppler equivalencies
rest_wavelength = 656.3 * u.nm # H-alpha
observed = 656.5 * u.nm
velocity = observed.to(u.km/u.s,
equivalencies=u.doppler_optical(rest_wavelength))
```
## 4. Time Handling
### Time Creation and Conversion
```python
from astropy.time import Time
import astropy.units as u
# Create time objects
t1 = Time('2023-01-01T00:00:00', format='isot', scale='utc')
t2 = Time(2459945.5, format='jd', scale='utc')
t3 = Time(['2023-01-01', '2023-06-01'], format='iso')
# Convert formats
print(t1.jd) # Julian Date
print(t1.mjd) # Modified Julian Date
print(t1.unix) # Unix timestamp
print(t1.iso) # ISO format
# Convert time scales
print(t1.tai) # Convert to TAI
print(t1.tt) # Convert to TT
print(t1.tdb) # Convert to TDB
```
### Time Arithmetic
```python
from astropy.time import Time, TimeDelta
import astropy.units as u
import numpy as np
t1 = Time('2023-01-01T00:00:00')
dt = TimeDelta(1*u.day)
# Add time delta
t2 = t1 + dt
# Difference between times
diff = t2 - t1
print(diff.to(u.hour))
# Array of times
times = t1 + np.arange(10) * u.day
```
### Sidereal Time and Astronomical Calculations
```python
from astropy.time import Time
from astropy.coordinates import EarthLocation
import astropy.units as u
location = EarthLocation(lat=40*u.deg, lon=-70*u.deg)
t = Time('2023-01-01T00:00:00')
# Local sidereal time
lst = t.sidereal_time('apparent', longitude=location.lon)
# Light travel time correction
from astropy.coordinates import SkyCoord
target = SkyCoord(ra=10*u.deg, dec=40*u.deg)
ltt_bary = t.light_travel_time(target, location=location)
t_bary = t + ltt_bary
```
## 5. Tables and Data Management
### Creating and Manipulating Tables
```python
from astropy.table import Table, Column
import astropy.units as u
import numpy as np
# Create table
t = Table()
t['name'] = ['Star1', 'Star2', 'Star3']
t['ra'] = [10.5, 11.2, 12.3] * u.degree
t['dec'] = [41.2, 42.1, 43.5] * u.degree
t['flux'] = [1.2, 2.3, 0.8] * u.Jy
# Add column metadata
t['flux'].description = 'Flux at 1.4 GHz'
t['flux'].format = '.2f'
# Add new column
t['flux_mJy'] = t['flux'].to(u.mJy)
# Filter rows
bright = t[t['flux'] > 1.0 * u.Jy]
# Sort
t.sort('flux')
```
### Table I/O
```python
from astropy.table import Table
# Read various formats
t = Table.read('data.fits')
t = Table.read('data.csv', format='ascii.csv')
t = Table.read('data.ecsv', format='ascii.ecsv') # Preserves units
t = Table.read('data.votable', format='votable')
# Write various formats
t.write('output.fits', overwrite=True)
t.write('output.csv', format='ascii.csv', overwrite=True)
t.write('output.ecsv', format='ascii.ecsv', overwrite=True)
t.write('output.votable', format='votable', overwrite=True)
```
### Advanced Table Operations
```python
from astropy.table import Table, join, vstack, hstack
import numpy as np
# Join tables
t1 = Table([[1, 2], ['a', 'b']], names=['id', 'val1'])
t2 = Table([[1, 2], ['c', 'd']], names=['id', 'val2'])
joined = join(t1, t2, keys='id')
# Stack tables vertically
combined = vstack([t1, t2])
# Stack horizontally
combined = hstack([t1, t2])
# Grouping (for a table t with a 'category' column)
t.group_by('category').groups.aggregate(np.mean)
```
### Tables with Astronomical Objects
```python
from astropy.table import Table
from astropy.coordinates import SkyCoord
from astropy.time import Time
import astropy.units as u
# Table with SkyCoord column
coords = SkyCoord(ra=[10, 11, 12]*u.deg, dec=[40, 41, 42]*u.deg)
times = Time(['2023-01-01', '2023-01-02', '2023-01-03'])
t = Table([coords, times], names=['coords', 'obstime'])
# Access individual coordinates
print(t['coords'][0].ra)
print(t['coords'][0].dec)
```
## 6. Cosmological Calculations
### Distance Calculations
```python
from astropy.cosmology import Planck18, FlatLambdaCDM
import astropy.units as u
import numpy as np
# Use built-in cosmology
cosmo = Planck18
# Redshifts
z = np.linspace(0, 5, 50)
# Calculate distances
comoving_dist = cosmo.comoving_distance(z)
angular_diam_dist = cosmo.angular_diameter_distance(z)
luminosity_dist = cosmo.luminosity_distance(z)
# Age of universe
age_at_z = cosmo.age(z)
lookback_time = cosmo.lookback_time(z)
# Hubble parameter
H_z = cosmo.H(z)
```
### Converting Observables
```python
from astropy.cosmology import Planck18
import astropy.units as u
import numpy as np
cosmo = Planck18
z = 1.5
# Angular diameter distance
d_A = cosmo.angular_diameter_distance(z)
# Convert angular size to physical size
angular_size = 1 * u.arcsec
physical_size = (angular_size.to(u.radian).value * d_A).to(u.kpc)
# Convert flux to luminosity
flux = 1e-17 * u.erg / u.s / u.cm**2
d_L = cosmo.luminosity_distance(z)
luminosity = flux * 4 * np.pi * d_L**2
# Find redshift for given distance
from astropy.cosmology import z_at_value
z_result = z_at_value(cosmo.luminosity_distance, 1000*u.Mpc)
```
### Custom Cosmology
```python
from astropy.cosmology import FlatLambdaCDM
import astropy.units as u
# Define custom cosmology
my_cosmo = FlatLambdaCDM(H0=70 * u.km/u.s/u.Mpc,
Om0=0.3,
Tcmb0=2.725 * u.K)
# Use it for calculations
print(my_cosmo.age(0))
print(my_cosmo.luminosity_distance(1.5))
```
## 7. Model Fitting
### Fitting 1D Models
```python
from astropy.modeling import models, fitting
import numpy as np
import matplotlib.pyplot as plt
# Generate data with noise
x = np.linspace(0, 10, 100)
true_model = models.Gaussian1D(amplitude=10, mean=5, stddev=1)
y = true_model(x) + np.random.normal(0, 0.5, x.shape)
# Create and fit model
g_init = models.Gaussian1D(amplitude=8, mean=4.5, stddev=0.8)
fitter = fitting.LevMarLSQFitter()
g_fit = fitter(g_init, x, y)
# Plot results
plt.plot(x, y, 'o', label='Data')
plt.plot(x, g_fit(x), label='Fit')
plt.legend()
# Get fitted parameters
print(f"Amplitude: {g_fit.amplitude.value}")
print(f"Mean: {g_fit.mean.value}")
print(f"Stddev: {g_fit.stddev.value}")
```
### Fitting with Constraints
```python
from astropy.modeling import models, fitting
# Set parameter bounds
g = models.Gaussian1D(amplitude=10, mean=5, stddev=1)
g.amplitude.bounds = (0, None) # Positive only
g.mean.bounds = (4, 6) # Constrain center
g.stddev.fixed = True # Fix width
# Tie parameters (for multi-component models)
g1 = models.Gaussian1D(amplitude=10, mean=5, stddev=1, name='g1')
g2 = models.Gaussian1D(amplitude=5, mean=6, stddev=1, name='g2')
g2.stddev.tied = lambda model: model.g1.stddev
# Compound model
model = g1 + g2
```
### 2D Image Fitting
```python
from astropy.modeling import models, fitting
import numpy as np
# Create 2D data
y, x = np.mgrid[0:100, 0:100]
z = models.Gaussian2D(amplitude=100, x_mean=50, y_mean=50,
x_stddev=5, y_stddev=5)(x, y)
z += np.random.normal(0, 5, z.shape)
# Fit 2D Gaussian
g_init = models.Gaussian2D(amplitude=90, x_mean=48, y_mean=48,
x_stddev=4, y_stddev=4)
fitter = fitting.LevMarLSQFitter()
g_fit = fitter(g_init, x, y, z)
# Get parameters
print(f"Center: ({g_fit.x_mean.value}, {g_fit.y_mean.value})")
print(f"Width: ({g_fit.x_stddev.value}, {g_fit.y_stddev.value})")
```
## 8. Image Processing and Visualization
### Image Display with Proper Scaling
```python
from astropy.io import fits
from astropy.visualization import ImageNormalize, SqrtStretch, PercentileInterval
import matplotlib.pyplot as plt
# Read FITS image
data = fits.getdata('image.fits')
# Apply normalization
norm = ImageNormalize(data,
interval=PercentileInterval(99),
stretch=SqrtStretch())
# Display
plt.imshow(data, norm=norm, origin='lower', cmap='gray')
plt.colorbar()
```
### WCS Plotting
```python
from astropy.io import fits
from astropy.wcs import WCS
from astropy.visualization import ImageNormalize, LogStretch, PercentileInterval
import matplotlib.pyplot as plt
# Read FITS with WCS
hdu = fits.open('image.fits')[0]
wcs = WCS(hdu.header)
data = hdu.data
# Create figure with WCS projection
fig = plt.figure()
ax = fig.add_subplot(111, projection=wcs)
# Plot with coordinate grid
norm = ImageNormalize(data, interval=PercentileInterval(99.5),
stretch=LogStretch())
im = ax.imshow(data, norm=norm, origin='lower', cmap='viridis')
# Add coordinate labels
ax.set_xlabel('RA')
ax.set_ylabel('Dec')
ax.coords.grid(color='white', alpha=0.5)
plt.colorbar(im)
```
### Sigma Clipping and Statistics
```python
from astropy.stats import sigma_clip, sigma_clipped_stats
import numpy as np
# Data with outliers
data = np.random.normal(100, 15, 1000)
data[0:50] = np.random.normal(200, 10, 50) # Add outliers
# Sigma clipping
clipped = sigma_clip(data, sigma=3, maxiters=5)
# Get statistics on clipped data
mean, median, std = sigma_clipped_stats(data, sigma=3)
print(f"Mean: {mean:.2f}")
print(f"Median: {median:.2f}")
print(f"Std: {std:.2f}")
print(f"Clipped {clipped.mask.sum()} values")
```
## 9. Complete Analysis Example
### Photometry Pipeline
```python
from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.stats import sigma_clipped_stats
from astropy.visualization import ImageNormalize, LogStretch
import astropy.units as u
import numpy as np
# Read FITS file
hdu = fits.open('observation.fits')[0]
data = hdu.data
header = hdu.header
wcs = WCS(header)
# Calculate background statistics
mean, median, std = sigma_clipped_stats(data, sigma=3.0)
print(f"Background: {median:.2f} +/- {std:.2f}")
# Subtract background
data_sub = data - median
# Known source coordinates
source_coord = SkyCoord(ra='10:42:30', dec='+41:16:09', unit=(u.hourangle, u.deg))
# Convert to pixel coordinates
x_pix, y_pix = wcs.world_to_pixel(source_coord)
# Simple aperture photometry
aperture_radius = 10 # pixels
y, x = np.ogrid[:data.shape[0], :data.shape[1]]
mask = (x - x_pix)**2 + (y - y_pix)**2 <= aperture_radius**2
aperture_sum = np.sum(data_sub[mask])
npix = np.sum(mask)
print(f"Source position: ({x_pix:.1f}, {y_pix:.1f})")
print(f"Aperture sum: {aperture_sum:.2f}")
print(f"S/N: {aperture_sum / (std * np.sqrt(npix)):.2f}")
```
This workflow document provides practical examples for common astronomical data analysis tasks using astropy.

View File

@@ -0,0 +1,273 @@
# Astronomical Coordinates (astropy.coordinates)
The `astropy.coordinates` package provides tools for representing celestial coordinates and transforming between different coordinate systems.
## Creating Coordinates with SkyCoord
The high-level `SkyCoord` class is the recommended interface:
```python
from astropy import units as u
from astropy.coordinates import SkyCoord
# Decimal degrees
c = SkyCoord(ra=10.625*u.degree, dec=41.2*u.degree, frame='icrs')
# Sexagesimal strings
c = SkyCoord(ra='00h42m30s', dec='+41d12m00s', frame='icrs')
# Mixed formats
c = SkyCoord('00h42.5m +41d12m', unit=(u.hourangle, u.deg))
# Galactic coordinates
c = SkyCoord(l=120.5*u.degree, b=-23.4*u.degree, frame='galactic')
```
## Array Coordinates
Process multiple coordinates efficiently using arrays:
```python
# Create array of coordinates
coords = SkyCoord(ra=[10, 11, 12]*u.degree,
dec=[41, -5, 42]*u.degree)
# Access individual elements
coords[0]
coords[1:3]
# Array operations
coords.shape
len(coords)
```
## Accessing Components
```python
c = SkyCoord(ra=10.68*u.degree, dec=41.27*u.degree, frame='icrs')
# Access coordinates
c.ra # <Longitude 10.68 deg>
c.dec # <Latitude 41.27 deg>
c.ra.hour # Convert to hours
c.ra.hms # Hours, minutes, seconds tuple
c.dec.dms # Degrees, arcminutes, arcseconds tuple
```
## String Formatting
```python
c.to_string('decimal') # '10.68 41.27'
c.to_string('dms') # '10d40m48s 41d16m12s'
c.to_string('hmsdms') # '00h42m43.2s +41d16m12s'
# Custom formatting
c.ra.to_string(unit=u.hour, sep=':', precision=2)
```
## Coordinate Transformations
Transform between reference frames:
```python
from astropy.coordinates import FK5
c_icrs = SkyCoord(ra=10.68*u.degree, dec=41.27*u.degree, frame='icrs')
# Simple transformations (as attributes)
c_galactic = c_icrs.galactic
c_fk5 = c_icrs.fk5
c_fk4 = c_icrs.fk4
# Explicit transformations
c_icrs.transform_to('galactic')
c_icrs.transform_to(FK5(equinox='J1975')) # Custom frame parameters
```
## Common Coordinate Frames
### Celestial Frames
- **ICRS**: International Celestial Reference System (default, most common)
- **FK5**: Fifth Fundamental Catalogue (equinox J2000.0 by default)
- **FK4**: Fourth Fundamental Catalogue (older, requires equinox specification)
- **GCRS**: Geocentric Celestial Reference System
- **CIRS**: Celestial Intermediate Reference System
### Galactic Frames
- **Galactic**: IAU 1958 galactic coordinates
- **Supergalactic**: De Vaucouleurs supergalactic coordinates
- **Galactocentric**: Galactic center-based 3D coordinates
### Horizontal Frames
- **AltAz**: Altitude-azimuth (observer-dependent)
- **HADec**: Hour angle-declination
### Ecliptic Frames
- **GeocentricMeanEcliptic**: Geocentric mean ecliptic
- **BarycentricMeanEcliptic**: Barycentric mean ecliptic
- **HeliocentricMeanEcliptic**: Heliocentric mean ecliptic
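A brief sketch transforming a single ICRS position into several of the frames listed above (the observer location and time for AltAz are arbitrary illustrative values):
```python
from astropy.coordinates import (SkyCoord, Galactic, FK5, Supergalactic,
                                 BarycentricMeanEcliptic, AltAz, EarthLocation)
from astropy.time import Time
import astropy.units as u
c = SkyCoord(ra=10.68*u.deg, dec=41.27*u.deg, frame='icrs')
# Frame classes (or their lowercase string names) can be passed directly
c.transform_to(Galactic())
c.transform_to(FK5(equinox='J1975'))
c.transform_to(Supergalactic())
c.transform_to(BarycentricMeanEcliptic())
# Observer-dependent frames need an observation time and location
site = EarthLocation(lat=40*u.deg, lon=-120*u.deg, height=1000*u.m)
c.transform_to(AltAz(obstime=Time('2023-01-15 23:00:00'), location=site))
```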
## Observer-Dependent Transformations
For altitude-azimuth coordinates, specify observation time and location:
```python
from astropy.time import Time
from astropy.coordinates import EarthLocation, AltAz
# Define observer location
observing_location = EarthLocation(lat=40.8*u.deg, lon=-121.5*u.deg, height=1060*u.m)
# Or use named observatory
observing_location = EarthLocation.of_site('Apache Point Observatory')
# Define observation time
observing_time = Time('2023-01-15 23:00:00')
# Transform to alt-az
aa_frame = AltAz(obstime=observing_time, location=observing_location)
aa = c_icrs.transform_to(aa_frame)
print(f"Altitude: {aa.alt}")
print(f"Azimuth: {aa.az}")
```
## Working with Distances
Add distance information for 3D coordinates:
```python
# With distance
c = SkyCoord(ra=10*u.degree, dec=9*u.degree, distance=770*u.kpc, frame='icrs')
# Access 3D Cartesian coordinates
c.cartesian.x
c.cartesian.y
c.cartesian.z
# Distance from origin
c.distance
# 3D separation
c1 = SkyCoord(ra=10*u.degree, dec=9*u.degree, distance=10*u.pc)
c2 = SkyCoord(ra=11*u.degree, dec=10*u.degree, distance=11.5*u.pc)
sep_3d = c1.separation_3d(c2) # 3D distance
```
## Angular Separation
Calculate on-sky separations:
```python
c1 = SkyCoord(ra=10*u.degree, dec=9*u.degree, frame='icrs')
c2 = SkyCoord(ra=11*u.degree, dec=10*u.degree, frame='fk5')
# Angular separation (handles frame conversion automatically)
sep = c1.separation(c2)
print(f"Separation: {sep.arcsec} arcsec")
# Position angle
pa = c1.position_angle(c2)
```
## Catalog Matching
Match coordinates to catalog sources:
```python
# Single target matching
catalog = SkyCoord(ra=ra_array*u.degree, dec=dec_array*u.degree)
target = SkyCoord(ra=10.5*u.degree, dec=41.2*u.degree)
# Find closest match
idx, sep2d, dist3d = target.match_to_catalog_sky(catalog)
matched_coord = catalog[idx]
# Match with maximum separation constraint
matches = target.separation(catalog) < 1*u.arcsec
```
## Named Objects
Retrieve coordinates from online catalogs:
```python
# Query by name (requires internet)
m31 = SkyCoord.from_name("M31")
crab = SkyCoord.from_name("Crab Nebula")
psr = SkyCoord.from_name("PSR J1012+5307")
```
## Earth Locations
Define observer locations:
```python
# By coordinates
location = EarthLocation(lat=40*u.deg, lon=-120*u.deg, height=1000*u.m)
# By named observatory
keck = EarthLocation.of_site('Keck Observatory')
vlt = EarthLocation.of_site('Paranal Observatory')
# By address (requires internet)
location = EarthLocation.of_address('1002 Holy Grail Court, St. Louis, MO')
# List available observatories
EarthLocation.get_site_names()
```
## Velocity Information
Include proper motion and radial velocity:
```python
# Proper motion
c = SkyCoord(ra=10*u.degree, dec=41*u.degree,
pm_ra_cosdec=15*u.mas/u.yr,
pm_dec=5*u.mas/u.yr,
distance=150*u.pc)
# Radial velocity
c = SkyCoord(ra=10*u.degree, dec=41*u.degree,
radial_velocity=20*u.km/u.s)
# Both
c = SkyCoord(ra=10*u.degree, dec=41*u.degree, distance=150*u.pc,
pm_ra_cosdec=15*u.mas/u.yr, pm_dec=5*u.mas/u.yr,
radial_velocity=20*u.km/u.s)
```
## Representation Types
Switch between coordinate representations:
```python
# Cartesian representation
c = SkyCoord(x=1*u.kpc, y=2*u.kpc, z=3*u.kpc,
representation_type='cartesian', frame='icrs')
# Change representation
c.representation_type = 'cylindrical'
c.rho # Cylindrical radius
c.phi # Azimuthal angle
c.z # Height
# Spherical (default for most frames)
c.representation_type = 'spherical'
```
## Performance Tips
1. **Use arrays, not loops**: Process multiple coordinates as single array
2. **Pre-compute frames**: Reuse frame objects for multiple transformations
3. **Use broadcasting**: Efficiently transform many positions across many times
4. **Enable interpolation**: For dense time sampling, use ErfaAstromInterpolator
```python
# Fast approach
coords = SkyCoord(ra=ra_array*u.degree, dec=dec_array*u.degree)
coords_transformed = coords.transform_to('galactic')
# Slow approach (avoid)
for ra, dec in zip(ra_array, dec_array):
    c = SkyCoord(ra=ra*u.degree, dec=dec*u.degree)
    c_transformed = c.transform_to('galactic')
```
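Tip 4 above refers to the interpolated-astrometry machinery; the sketch below shows the general pattern, assuming an illustrative site, time grid, and 300 s interpolation resolution:
```python
import numpy as np
import astropy.units as u
from astropy.time import Time
from astropy.coordinates import SkyCoord, EarthLocation, AltAz
from astropy.coordinates.erfa_astrom import erfa_astrom, ErfaAstromInterpolator
# Dense grid of observation times (values are illustrative)
location = EarthLocation(lat=40*u.deg, lon=-120*u.deg, height=1000*u.m)
times = Time('2023-01-15 23:00:00') + np.linspace(0, 4, 1000) * u.hour
target = SkyCoord(ra=10.68*u.deg, dec=41.27*u.deg)
# Interpolate the ERFA astrometry context at 300 s resolution instead of
# recomputing it for every individual time step
with erfa_astrom.set(ErfaAstromInterpolator(300 * u.s)):
    altaz = target.transform_to(AltAz(obstime=times, location=location))
```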

View File

@@ -0,0 +1,307 @@
# Cosmological Calculations (astropy.cosmology)
The `astropy.cosmology` subpackage provides tools for cosmological calculations based on various cosmological models.
## Using Built-in Cosmologies
Preloaded cosmologies based on WMAP and Planck observations:
```python
from astropy.cosmology import Planck18, Planck15, Planck13
from astropy.cosmology import WMAP9, WMAP7, WMAP5
from astropy import units as u
# Use Planck 2018 cosmology
cosmo = Planck18
# Calculate distance to z=4
d = cosmo.luminosity_distance(4)
print(f"Luminosity distance at z=4: {d}")
# Age of universe at z=0
age = cosmo.age(0)
print(f"Current age of universe: {age.to(u.Gyr)}")
```
## Creating Custom Cosmologies
### FlatLambdaCDM (Most Common)
Flat universe with cosmological constant:
```python
from astropy.cosmology import FlatLambdaCDM
# Define cosmology
cosmo = FlatLambdaCDM(
H0=70 * u.km / u.s / u.Mpc, # Hubble constant at z=0
Om0=0.3, # Matter density parameter at z=0
Tcmb0=2.725 * u.K # CMB temperature (optional)
)
```
### LambdaCDM (Non-Flat)
Non-flat universe with cosmological constant:
```python
from astropy.cosmology import LambdaCDM
cosmo = LambdaCDM(
H0=70 * u.km / u.s / u.Mpc,
Om0=0.3,
Ode0=0.7 # Dark energy density parameter
)
```
### wCDM and w0wzCDM
Dark energy with equation of state parameter:
```python
from astropy.cosmology import FlatwCDM, w0wzCDM
# Constant w
cosmo_w = FlatwCDM(H0=70 * u.km/u.s/u.Mpc, Om0=0.3, w0=-0.9)
# Evolving w(z) = w0 + wz * z
cosmo_wz = w0wzCDM(H0=70 * u.km/u.s/u.Mpc, Om0=0.3, Ode0=0.7,
w0=-1.0, wz=0.1)
```
## Distance Calculations
### Comoving Distance
Line-of-sight comoving distance:
```python
d_c = cosmo.comoving_distance(z)
```
### Luminosity Distance
Distance for calculating luminosity from observed flux:
```python
d_L = cosmo.luminosity_distance(z)
# Calculate absolute magnitude from apparent magnitude
M = m - 5*np.log10(d_L.to(u.pc).value) + 5
```
### Angular Diameter Distance
Distance for calculating physical size from angular size:
```python
d_A = cosmo.angular_diameter_distance(z)
# Calculate physical size from angular size
theta = 10 * u.arcsec # Angular size
physical_size = d_A * theta.to(u.radian).value
```
### Comoving Transverse Distance
Transverse comoving distance (equals comoving distance in flat universe):
```python
d_M = cosmo.comoving_transverse_distance(z)
```
### Distance Modulus
```python
dm = cosmo.distmod(z)
# Relates apparent and absolute magnitudes: m - M = dm
```
## Scale Calculations
### kpc per Arcminute
Physical scale at a given redshift:
```python
scale = cosmo.kpc_proper_per_arcmin(z)
# e.g., roughly 500 kpc per arcminute at z=1 for Planck18
```
### Comoving Volume
Volume element for survey volume calculations:
```python
vol = cosmo.comoving_volume(z) # Total volume to redshift z
vol_element = cosmo.differential_comoving_volume(z) # dV/dz
```
## Time Calculations
### Age of Universe
Age at a given redshift:
```python
age = cosmo.age(z)
age_now = cosmo.age(0) # Current age
age_at_z1 = cosmo.age(1) # Age at z=1
```
### Lookback Time
Time since photons were emitted:
```python
t_lookback = cosmo.lookback_time(z)
# Time between z and z=0
```
## Hubble Parameter
Hubble parameter as function of redshift:
```python
H_z = cosmo.H(z) # H(z) in km/s/Mpc
E_z = cosmo.efunc(z) # E(z) = H(z)/H0
```
## Density Parameters
Evolution of density parameters with redshift:
```python
Om_z = cosmo.Om(z) # Matter density at z
Ode_z = cosmo.Ode(z) # Dark energy density at z
Ok_z = cosmo.Ok(z) # Curvature density at z
Ogamma_z = cosmo.Ogamma(z) # Photon density at z
Onu_z = cosmo.Onu(z) # Neutrino density at z
```
## Critical and Characteristic Densities
```python
rho_c = cosmo.critical_density(z) # Critical density at z
rho_m = cosmo.critical_density(z) * cosmo.Om(z) # Matter density
```
## Inverse Calculations
Find redshift corresponding to a specific value:
```python
from astropy.cosmology import z_at_value
# Find z at specific lookback time
z = z_at_value(cosmo.lookback_time, 10*u.Gyr)
# Find z at specific luminosity distance
z = z_at_value(cosmo.luminosity_distance, 1000*u.Mpc)
# Find z at specific age
z = z_at_value(cosmo.age, 1*u.Gyr)
```
## Array Operations
All methods accept array inputs:
```python
import numpy as np
z_array = np.linspace(0, 5, 100)
d_L_array = cosmo.luminosity_distance(z_array)
H_array = cosmo.H(z_array)
age_array = cosmo.age(z_array)
```
## Neutrino Effects
Include massive neutrinos:
```python
from astropy.cosmology import FlatLambdaCDM
# With massive neutrinos
cosmo = FlatLambdaCDM(
H0=70 * u.km/u.s/u.Mpc,
Om0=0.3,
Tcmb0=2.725 * u.K,
Neff=3.04, # Effective number of neutrino species
m_nu=[0., 0., 0.06] * u.eV # Neutrino masses
)
```
Note: Massive neutrinos reduce performance by 3-4x but provide more accurate results.
## Cloning and Modifying Cosmologies
Cosmology objects are immutable. Create modified copies:
```python
# Clone with different H0
cosmo_new = cosmo.clone(H0=72 * u.km/u.s/u.Mpc)
# Clone with modified name
cosmo_named = cosmo.clone(name="My Custom Cosmology")
```
## Common Use Cases
### Calculating Absolute Magnitude
```python
# From apparent magnitude and redshift
z = 1.5
m_app = 24.5 # Apparent magnitude
d_L = cosmo.luminosity_distance(z)
M_abs = m_app - cosmo.distmod(z).value
```
### Survey Volume Calculations
```python
# Volume between two redshifts
z_min, z_max = 0.5, 1.5
volume = cosmo.comoving_volume(z_max) - cosmo.comoving_volume(z_min)
# Convert to Gpc^3
volume_gpc3 = volume.to(u.Gpc**3)
```
### Physical Size from Angular Size
```python
theta = 1 * u.arcsec # Angular size
z = 2.0
d_A = cosmo.angular_diameter_distance(z)
size_kpc = (d_A * theta.to(u.radian).value).to(u.kpc)
```
### Time Since Big Bang
```python
# Age at specific redshift
z_formation = 6
age_at_formation = cosmo.age(z_formation)
time_since_formation = cosmo.age(0) - age_at_formation
```
## Comparison of Cosmologies
```python
# Compare different models
from astropy.cosmology import Planck18, WMAP9
z = 1.0
print(f"Planck18 d_L: {Planck18.luminosity_distance(z)}")
print(f"WMAP9 d_L: {WMAP9.luminosity_distance(z)}")
```
## Performance Considerations
- Calculations are fast for most purposes
- Massive neutrinos reduce speed significantly
- Array operations are vectorized and efficient
- Results valid for z < 5000-6000 (depends on model)

View File

@@ -0,0 +1,396 @@
# FITS File Handling (astropy.io.fits)
The `astropy.io.fits` module provides comprehensive tools for reading, writing, and manipulating FITS (Flexible Image Transport System) files.
## Opening FITS Files
### Basic File Opening
```python
from astropy.io import fits
# Open file (returns HDUList - list of HDUs)
hdul = fits.open('filename.fits')
# Always close when done
hdul.close()
# Better: use context manager (automatically closes)
with fits.open('filename.fits') as hdul:
    hdul.info()  # Display file structure
    data = hdul[0].data
```
### File Opening Modes
```python
fits.open('file.fits', mode='readonly') # Read-only (default)
fits.open('file.fits', mode='update') # Read and write
fits.open('file.fits', mode='append') # Add HDUs to file
```
### Memory Mapping
For large files, use memory mapping (default behavior):
```python
hdul = fits.open('large_file.fits', memmap=True)
# Only loads data chunks as needed
```
### Remote Files
Access cloud-hosted FITS files:
```python
uri = "s3://bucket-name/image.fits"
with fits.open(uri, use_fsspec=True, fsspec_kwargs={"anon": True}) as hdul:
    # Use .section to get cutouts without downloading entire file
    cutout = hdul[1].section[100:200, 100:200]
```
## HDU Structure
FITS files contain Header Data Units (HDUs):
- **Primary HDU** (`hdul[0]`): First HDU, always present
- **Extension HDUs** (`hdul[1:]`): Image or table extensions
```python
hdul.info() # Display all HDUs
# Output:
# No. Name Ver Type Cards Dimensions Format
# 0 PRIMARY 1 PrimaryHDU 220 ()
# 1 SCI 1 ImageHDU 140 (1014, 1014) float32
# 2 ERR 1 ImageHDU 51 (1014, 1014) float32
```
## Accessing HDUs
```python
# By index
primary = hdul[0]
extension1 = hdul[1]
# By name
sci = hdul['SCI']
# By name and version number
sci2 = hdul['SCI', 2] # Second SCI extension
```
## Working with Headers
### Reading Header Values
```python
hdu = hdul[0]
header = hdu.header
# Get keyword value (case-insensitive)
observer = header['OBSERVER']
exptime = header['EXPTIME']
# Get with default if missing
filter_name = header.get('FILTER', 'Unknown')
# Access by index
value = header[7] # 8th card's value
```
### Modifying Headers
```python
# Update existing keyword
header['OBSERVER'] = 'Edwin Hubble'
# Add/update with comment
header['OBSERVER'] = ('Edwin Hubble', 'Name of observer')
# Add keyword at specific position
header.insert(5, ('NEWKEY', 'value', 'comment'))
# Add HISTORY and COMMENT
header['HISTORY'] = 'File processed on 2025-01-15'
header['COMMENT'] = 'Note about the data'
# Delete keyword
del header['OLDKEY']
```
### Header Cards
Each keyword is stored as a "card" (80-character record):
```python
# Access full card
card = header.cards[0]
print(f"{card.keyword} = {card.value} / {card.comment}")
# Iterate over all cards
for card in header.cards:
    print(f"{card.keyword}: {card.value}")
```
## Working with Image Data
### Reading Image Data
```python
# Get data from HDU
data = hdul[1].data # Returns NumPy array
# Data properties
print(data.shape) # e.g., (1024, 1024)
print(data.dtype) # e.g., float32
print(data.min(), data.max())
# Access specific pixels
pixel_value = data[100, 200]
region = data[100:200, 300:400]
```
### Data Operations
Data is a NumPy array, so use standard NumPy operations:
```python
import numpy as np
import scipy.ndimage
# Statistics
mean = np.mean(data)
median = np.median(data)
std = np.std(data)
# Modify data
data[data < 0] = 0 # Clip negative values
data = data * gain + bias # Calibration
# Mathematical operations
log_data = np.log10(data)
smoothed = scipy.ndimage.gaussian_filter(data, sigma=2)
```
### Cutouts and Sections
Extract regions without loading entire array:
```python
# Section notation [y_start:y_end, x_start:x_end]
cutout = hdul[1].section[500:600, 700:800]
```
## Creating New FITS Files
### Simple Image File
```python
# Create data
data = np.random.random((100, 100))
# Create HDU
hdu = fits.PrimaryHDU(data=data)
# Add header keywords
hdu.header['OBJECT'] = 'Test Image'
hdu.header['EXPTIME'] = 300.0
# Write to file
hdu.writeto('new_image.fits')
# Overwrite if exists
hdu.writeto('new_image.fits', overwrite=True)
```
### Multi-Extension File
```python
# Create primary HDU (can have no data)
primary = fits.PrimaryHDU()
primary.header['TELESCOP'] = 'HST'
# Create image extensions
sci_data = np.ones((100, 100))
sci = fits.ImageHDU(data=sci_data, name='SCI')
err_data = np.ones((100, 100)) * 0.1
err = fits.ImageHDU(data=err_data, name='ERR')
# Combine into HDUList
hdul = fits.HDUList([primary, sci, err])
# Write to file
hdul.writeto('multi_extension.fits')
```
## Working with Table Data
### Reading Tables
```python
# Open table
with fits.open('table.fits') as hdul:
    table = hdul[1].data  # BinTableHDU or TableHDU
    # Access columns
    ra = table['RA']
    dec = table['DEC']
    mag = table['MAG']
    # Access rows
    first_row = table[0]
    subset = table[10:20]
    # Column info
    cols = hdul[1].columns
    print(cols.names)
    cols.info()
```
### Creating Tables
```python
# Define columns
col1 = fits.Column(name='ID', format='K', array=[1, 2, 3, 4])
col2 = fits.Column(name='RA', format='D', array=[10.5, 11.2, 12.3, 13.1])
col3 = fits.Column(name='DEC', format='D', array=[41.2, 42.1, 43.5, 44.2])
col4 = fits.Column(name='Name', format='20A',
array=['Star1', 'Star2', 'Star3', 'Star4'])
# Create table HDU
table_hdu = fits.BinTableHDU.from_columns([col1, col2, col3, col4])
table_hdu.name = 'CATALOG'
# Write to file
table_hdu.writeto('catalog.fits', overwrite=True)
```
### Column Formats
Common FITS table column formats:
- `'A'`: Character string (e.g., '20A' for 20 characters)
- `'L'`: Logical (boolean)
- `'B'`: Unsigned byte
- `'I'`: 16-bit integer
- `'J'`: 32-bit integer
- `'K'`: 64-bit integer
- `'E'`: 32-bit floating point
- `'D'`: 64-bit floating point
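As an illustrative sketch, a few of these format codes paired with matching column data (column names and values here are made up):
```python
from astropy.io import fits
cols = [
    fits.Column(name='FLAG', format='L', array=[True, False, True]),   # logical
    fits.Column(name='COUNT', format='J', array=[10, 20, 30]),         # 32-bit int
    fits.Column(name='FLUX', format='E', array=[1.2, 2.3, 3.4]),       # 32-bit float
    fits.Column(name='LABEL', format='10A', array=['a', 'b', 'c']),    # 10-char string
]
hdu = fits.BinTableHDU.from_columns(cols)
```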
## Modifying Existing Files
### Update Mode
```python
with fits.open('file.fits', mode='update') as hdul:
    # Modify header
    hdul[0].header['NEWKEY'] = 'value'
    # Modify data
    hdul[1].data[100, 100] = 999
    # Changes automatically saved when context exits
```
### Append Mode
```python
# Add new extension to existing file
new_data = np.random.random((50, 50))
new_hdu = fits.ImageHDU(data=new_data, name='NEW_EXT')
with fits.open('file.fits', mode='append') as hdul:
    hdul.append(new_hdu)
```
## Convenience Functions
For quick operations without managing HDU lists:
```python
# Get data only
data = fits.getdata('file.fits', ext=1)
# Get header only
header = fits.getheader('file.fits', ext=0)
# Get both
data, header = fits.getdata('file.fits', ext=1, header=True)
# Get single keyword value
exptime = fits.getval('file.fits', 'EXPTIME', ext=0)
# Set keyword value
fits.setval('file.fits', 'NEWKEY', value='newvalue', ext=0)
# Write simple file
fits.writeto('output.fits', data, header, overwrite=True)
# Append to file
fits.append('file.fits', data, header)
# Display file info
fits.info('file.fits')
```
## Comparing FITS Files
```python
# Print differences between two files
fits.printdiff('file1.fits', 'file2.fits')
# Compare programmatically
diff = fits.FITSDiff('file1.fits', 'file2.fits')
print(diff.report())
```
## Converting Between Formats
### FITS to/from Astropy Table
```python
from astropy.table import Table
# FITS to Table
table = Table.read('catalog.fits')
# Table to FITS
table.write('output.fits', format='fits', overwrite=True)
```
## Best Practices
1. **Always use context managers** (`with` statements) for safe file handling
2. **Avoid modifying structural keywords** (SIMPLE, BITPIX, NAXIS, etc.)
3. **Use memory mapping** for large files to conserve RAM
4. **Use .section** for remote files to avoid full downloads
5. **Check HDU structure** with `.info()` before accessing data
6. **Verify data types** before operations to avoid unexpected behavior
7. **Use convenience functions** for simple one-off operations
## Common Issues
### Handling Non-Standard FITS
Some files violate FITS standards:
```python
# Ignore verification warnings
hdul = fits.open('bad_file.fits', ignore_missing_end=True)
# Fix non-standard files
hdul = fits.open('bad_file.fits')
hdul.verify('fix') # Try to fix issues
hdul.writeto('fixed_file.fits')
```
### Large File Performance
```python
# Use memory mapping (default)
hdul = fits.open('huge_file.fits', memmap=True)
# For write operations with large arrays, use Dask
import dask.array as da
large_array = da.random.random((10000, 10000))
fits.writeto('output.fits', large_array)
```

View File

@@ -1,340 +0,0 @@
# Astropy Module Overview
This document provides a comprehensive reference of all major astropy subpackages and their capabilities.
## Core Data Structures
### astropy.units
**Purpose**: Handle physical units and dimensional analysis in computations.
**Key Classes**:
- `Quantity` - Combines numerical values with units
- `Unit` - Represents physical units
**Common Operations**:
```python
import astropy.units as u
distance = 5 * u.meter
time = 2 * u.second
velocity = distance / time # Returns Quantity in m/s
wavelength = 500 * u.nm
frequency = wavelength.to(u.Hz, equivalencies=u.spectral())
```
**Equivalencies**:
- `u.spectral()` - Convert wavelength ↔ frequency
- `u.doppler_optical()`, `u.doppler_radio()` - Velocity conversions
- `u.temperature()` - Temperature unit conversions
- `u.pixel_scale()` - Pixel to physical units
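Brief usage sketches for the temperature, pixel-scale, and Doppler equivalencies above (the plate scale is an assumed example value):
```python
import astropy.units as u
# Temperature: Kelvin <-> Celsius
t_c = (300 * u.K).to(u.deg_C, equivalencies=u.temperature())
# Pixel scale: pixels <-> angle for an assumed 0.2 arcsec/pixel plate scale
angle = (100 * u.pixel).to(u.arcsec,
                           equivalencies=u.pixel_scale(0.2 * u.arcsec / u.pixel))
# Doppler (radio convention) around the 21 cm rest frequency
v = (1420.2 * u.MHz).to(u.km / u.s,
                        equivalencies=u.doppler_radio(1420.40575 * u.MHz))
```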
### astropy.constants
**Purpose**: Provide physical and astronomical constants.
**Common Constants**:
- `c` - Speed of light
- `G` - Gravitational constant
- `h` - Planck constant
- `M_sun`, `R_sun`, `L_sun` - Solar mass, radius, luminosity
- `M_earth`, `R_earth` - Earth mass, radius
- `pc`, `au` - Parsec, astronomical unit
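Constants behave like Quantity objects and combine directly with units; a quick sketch:
```python
from astropy import constants as const
import astropy.units as u
print(const.c.to(u.km / u.s))    # Speed of light in km/s
print(const.M_sun)               # Solar mass (value, uncertainty, reference)
# Combine constants and units freely, e.g. the Sun's Schwarzschild radius
r_s = (2 * const.G * const.M_sun / const.c**2).to(u.km)
```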
### astropy.time
**Purpose**: Represent and manipulate times and dates with astronomical precision.
**Time Scales**:
- `UTC` - Coordinated Universal Time
- `TAI` - International Atomic Time
- `TT` - Terrestrial Time
- `TCB`, `TCG` - Barycentric/Geocentric Coordinate Time
- `TDB` - Barycentric Dynamical Time
- `UT1` - Universal Time
**Common Formats**:
- `iso`, `isot` - ISO 8601 strings
- `jd`, `mjd` - Julian/Modified Julian Date
- `unix`, `gps` - Unix/GPS timestamps
- `datetime` - Python datetime objects
**Example**:
```python
from astropy.time import Time
t = Time('2023-01-01T00:00:00', format='isot', scale='utc')
print(t.mjd) # Modified Julian Date
print(t.jd) # Julian Date
print(t.tt) # Convert to TT scale
```
### astropy.table
**Purpose**: Work with tabular data optimized for astronomical applications.
**Key Features**:
- Native support for astropy Quantity, Time, and SkyCoord columns
- Multi-dimensional columns
- Metadata preservation (units, descriptions, formats)
- Advanced operations: joins, grouping, binning
- File I/O via unified interface
**Example**:
```python
from astropy.table import Table
import astropy.units as u
t = Table()
t['name'] = ['Star1', 'Star2', 'Star3']
t['ra'] = [10.5, 11.2, 12.3] * u.degree
t['dec'] = [41.2, 42.1, 43.5] * u.degree
t['flux'] = [1.2, 2.3, 0.8] * u.Jy
```
## Coordinates and World Coordinate Systems
### astropy.coordinates
**Purpose**: Represent and transform celestial coordinates.
**Primary Interface**: `SkyCoord` - High-level class for sky positions
**Coordinate Frames**:
- `ICRS` - International Celestial Reference System (default)
- `FK5`, `FK4` - Fifth/Fourth Fundamental Katalog
- `Galactic`, `Supergalactic` - Galactic coordinates
- `AltAz` - Horizontal (altitude-azimuth) coordinates
- `GCRS`, `CIRS`, `ITRS` - Earth-based systems
- `BarycentricMeanEcliptic`, `HeliocentricMeanEcliptic`, `GeocentricMeanEcliptic` - Ecliptic coordinates
**Common Operations**:
```python
from astropy.coordinates import SkyCoord
import astropy.units as u
# Create coordinate
c = SkyCoord(ra=10.625*u.degree, dec=41.2*u.degree, frame='icrs')
# Transform to galactic
c_gal = c.galactic
# Calculate separation
c2 = SkyCoord(ra=11*u.degree, dec=42*u.degree, frame='icrs')
sep = c.separation(c2)
# Match catalogs
idx, sep2d, dist3d = c.match_to_catalog_sky(catalog_coords)
```
### astropy.wcs
**Purpose**: Handle World Coordinate System transformations for astronomical images.
**Key Class**: `WCS` - Maps between pixel and world coordinates
**Common Use Cases**:
- Convert pixel coordinates to RA/Dec
- Convert RA/Dec to pixel coordinates
- Handle distortion corrections (SIP, lookup tables)
**Example**:
```python
from astropy.wcs import WCS
from astropy.io import fits
hdu = fits.open('image.fits')[0]
wcs = WCS(hdu.header)
# Pixel to world
ra, dec = wcs.pixel_to_world_values(100, 200)
# World to pixel
x, y = wcs.world_to_pixel_values(ra, dec)
```
## File I/O
### astropy.io.fits
**Purpose**: Read and write FITS (Flexible Image Transport System) files.
**Key Classes**:
- `HDUList` - Container for all HDUs in a file
- `PrimaryHDU` - Primary header data unit
- `ImageHDU` - Image extension
- `BinTableHDU` - Binary table extension
- `Header` - FITS header keywords
**Common Operations**:
```python
from astropy.io import fits
# Read FITS file
with fits.open('file.fits') as hdul:
hdul.info() # Display structure
header = hdul[0].header
data = hdul[0].data
# Write FITS file
fits.writeto('output.fits', data, header)
# Update header keyword
fits.setval('file.fits', 'OBJECT', value='M31')
```
### astropy.io.ascii
**Purpose**: Read and write ASCII tables in various formats.
**Supported Formats**:
- Basic, CSV, tab-delimited
- CDS/MRT (Machine Readable Tables)
- IPAC, Daophot, SExtractor
- LaTeX tables
- HTML tables
### astropy.io.votable
**Purpose**: Handle Virtual Observatory (VO) table format.
### astropy.io.misc
**Purpose**: Additional formats including HDF5, Parquet, and YAML.
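These I/O subpackages are normally reached through the unified `Table.read`/`Table.write` interface rather than imported directly; a minimal sketch (file names are placeholders, and the HDF5 line assumes h5py is installed):
```python
from astropy.table import Table
t = Table.read('catalog.tbl', format='ascii.ipac')   # IPAC table via io.ascii
t.write('catalog.tex', format='ascii.latex')          # LaTeX table
t.write('catalog.vot', format='votable')              # VO table via io.votable
t.write('catalog.h5', format='hdf5', path='data')     # HDF5 via io.misc
```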
## Scientific Calculations
### astropy.cosmology
**Purpose**: Perform cosmological calculations.
**Common Models**:
- `FlatLambdaCDM` - Flat universe with cosmological constant (most common)
- `LambdaCDM` - Universe with cosmological constant
- `Planck18`, `Planck15`, `Planck13` - Pre-defined Planck parameters
- `WMAP9`, `WMAP7`, `WMAP5` - Pre-defined WMAP parameters
**Common Methods**:
```python
from astropy.cosmology import FlatLambdaCDM, Planck18
import astropy.units as u
cosmo = FlatLambdaCDM(H0=70, Om0=0.3)
# Or use built-in: cosmo = Planck18
z = 1.5
print(cosmo.age(z)) # Age of universe at z
print(cosmo.luminosity_distance(z)) # Luminosity distance
print(cosmo.angular_diameter_distance(z)) # Angular diameter distance
print(cosmo.comoving_distance(z)) # Comoving distance
print(cosmo.H(z)) # Hubble parameter at z
```
### astropy.modeling
**Purpose**: Framework for model evaluation and fitting.
**Model Categories**:
- 1D models: Gaussian1D, Lorentz1D, Voigt1D, Polynomial1D
- 2D models: Gaussian2D, Disk2D, Moffat2D
- Physical models: BlackBody, Drude1D, NFW
- Polynomial models: Chebyshev, Legendre
**Common Fitters**:
- `LinearLSQFitter` - Linear least squares
- `LevMarLSQFitter` - Levenberg-Marquardt
- `SimplexLSQFitter` - Downhill simplex
**Example**:
```python
from astropy.modeling import models, fitting
# Create model
g = models.Gaussian1D(amplitude=10, mean=5, stddev=1)
# Fit to data
fitter = fitting.LevMarLSQFitter()
fitted_model = fitter(g, x_data, y_data)
```
### astropy.convolution
**Purpose**: Convolve and filter astronomical data.
**Common Kernels**:
- `Gaussian2DKernel` - 2D Gaussian smoothing
- `Box2DKernel` - 2D boxcar smoothing
- `Tophat2DKernel` - 2D tophat filter
- Custom kernels via arrays
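A minimal smoothing sketch with one of the kernels above (the kernel width and random image are arbitrary):
```python
import numpy as np
from astropy.convolution import convolve, Gaussian2DKernel
image = np.random.random((100, 100))
kernel = Gaussian2DKernel(x_stddev=2)   # 2-pixel Gaussian kernel
smoothed = convolve(image, kernel)      # NaN-aware convolution
```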
### astropy.stats
**Purpose**: Statistical tools for astronomical data analysis.
**Key Functions**:
- `sigma_clip()` - Remove outliers via sigma clipping
- `sigma_clipped_stats()` - Compute mean, median, std with clipping
- `mad_std()` - Median Absolute Deviation
- `biweight_location()`, `biweight_scale()` - Robust statistics
- `circmean()`, `circstd()` - Circular statistics
**Example**:
```python
from astropy.stats import sigma_clip, sigma_clipped_stats
# Remove outliers
filtered_data = sigma_clip(data, sigma=3, maxiters=5)
# Get statistics
mean, median, std = sigma_clipped_stats(data, sigma=3)
```
## Data Processing
### astropy.nddata
**Purpose**: Handle N-dimensional datasets with metadata.
**Key Class**: `NDData` - Container for array data with units, uncertainty, mask, and WCS
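A short sketch bundling array data with an uncertainty, mask, and unit (values are illustrative):
```python
import numpy as np
import astropy.units as u
from astropy.nddata import NDData, StdDevUncertainty
data = np.random.random((50, 50))
ndd = NDData(data,
             uncertainty=StdDevUncertainty(0.05 * np.ones_like(data)),
             mask=data < 0.01,
             unit=u.electron / u.s)
print(ndd.unit, ndd.data.shape)
```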
### astropy.timeseries
**Purpose**: Work with time series data.
**Key Classes**:
- `TimeSeries` - Time-indexed data table
- `BinnedTimeSeries` - Time-binned data
**Common Operations**:
- Period finding (Lomb-Scargle)
- Folding time series
- Binning data
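A hedged end-to-end sketch of the operations listed above, using a synthetic 30-minute signal (all values are illustrative):
```python
import numpy as np
import astropy.units as u
from astropy.timeseries import TimeSeries, LombScargle
# Evenly sampled series with a synthetic 30-minute periodic signal
ts = TimeSeries(time_start='2023-01-01T00:00:00', time_delta=3 * u.min,
                n_samples=500)
elapsed = (ts.time - ts.time[0]).to(u.min)
ts['flux'] = (1 + 0.1 * np.sin(2 * np.pi * elapsed / (30 * u.min))) * u.Jy
# Lomb-Scargle periodogram
frequency, power = LombScargle(ts.time, ts['flux']).autopower()
best_period = (1 / frequency[np.argmax(power)]).to(u.min)
# Fold at the recovered period
folded = ts.fold(period=best_period)
```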
### astropy.visualization
**Purpose**: Display astronomical data effectively.
**Key Features**:
- Image normalization (LogStretch, PowerStretch, SqrtStretch, etc.)
- Interval scaling (MinMaxInterval, PercentileInterval, ZScaleInterval)
- WCSAxes for plotting with coordinate overlays
- RGB image creation with stretching
- Astronomical colormaps
**Example**:
```python
from astropy.visualization import ImageNormalize, SqrtStretch, PercentileInterval
import matplotlib.pyplot as plt
norm = ImageNormalize(data, interval=PercentileInterval(99),
stretch=SqrtStretch())
plt.imshow(data, norm=norm, origin='lower')
```
## Utilities
### astropy.samp
**Purpose**: Simple Application Messaging Protocol for inter-application communication.
**Use Case**: Connect Python scripts with other astronomical tools (e.g., DS9, TOPCAT).
## Module Import Patterns
**Standard imports**:
```python
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from astropy.io import fits
from astropy.table import Table
from astropy import constants as const
```
## Performance Tips
1. **Pre-compute composite units** for repeated operations
2. **Use `<<` operator** for fast unit assignments: `array << u.m` instead of `array * u.m`
3. **Vectorize operations** rather than looping over coordinates/times
4. **Use memmap=True** when opening large FITS files
5. **Install Bottleneck** for faster stats operations
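A small sketch of tips 1-2 (array sizes are arbitrary):
```python
import numpy as np
import astropy.units as u
values = np.random.random(1_000_000)
# Tip 2: `<<` attaches a unit without copying the underlying array
q_view = values << u.m     # view of `values`
q_copy = values * u.m      # multiplication makes a copy
# Tip 1: build a composite unit once and reuse it
flux_unit = u.erg / u.s / u.cm**2 / u.angstrom
spectrum = np.ones(4000) << flux_unit
```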

View File

@@ -0,0 +1,489 @@
# Table Operations (astropy.table)
The `astropy.table` module provides flexible tools for working with tabular data, with support for units, masked values, and various file formats.
## Creating Tables
### Basic Table Creation
```python
from astropy.table import Table, QTable
import astropy.units as u
import numpy as np
# From column arrays
a = [1, 4, 5]
b = [2.0, 5.0, 8.2]
c = ['x', 'y', 'z']
t = Table([a, b, c], names=('id', 'flux', 'name'))
# With units (use QTable)
flux = [1.2, 2.3, 3.4] * u.Jy
wavelength = [500, 600, 700] * u.nm
t = QTable([flux, wavelength], names=('flux', 'wavelength'))
```
### From Lists of Rows
```python
# List of tuples
rows = [(1, 10.5, 'A'), (2, 11.2, 'B'), (3, 12.3, 'C')]
t = Table(rows=rows, names=('id', 'value', 'name'))
# List of dictionaries
rows = [{'id': 1, 'value': 10.5}, {'id': 2, 'value': 11.2}]
t = Table(rows)
```
### From NumPy Arrays
```python
# Structured array
arr = np.array([(1, 2.0, 'x'), (4, 5.0, 'y')],
dtype=[('a', 'i4'), ('b', 'f8'), ('c', 'U10')])
t = Table(arr)
# 2D array with column names
data = np.random.random((100, 3))
t = Table(data, names=['col1', 'col2', 'col3'])
```
### From Pandas DataFrame
```python
import pandas as pd
df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
t = Table.from_pandas(df)
```
## Accessing Table Data
### Basic Access
```python
# Column access
ra_col = t['ra'] # Returns Column object
dec_col = t['dec']
# Row access
first_row = t[0] # Returns Row object
row_slice = t[10:20] # Returns new Table
# Cell access
value = t['ra'][5] # 6th value in 'ra' column
value = t[5]['ra'] # Same thing
# Multiple columns
subset = t['ra', 'dec', 'mag']
```
### Table Properties
```python
len(t) # Number of rows
t.colnames # List of column names
t.dtype # Column data types
t.info # Detailed information
t.meta # Metadata dictionary
```
### Iteration
```python
# Iterate over rows
for row in t:
    print(row['ra'], row['dec'])
# Iterate over columns
for colname in t.colnames:
    print(t[colname])
```
## Modifying Tables
### Adding Columns
```python
# Add new column
t['new_col'] = [1, 2, 3, 4, 5]
t['calc'] = t['a'] + t['b'] # Calculated column
# Add column with units
t['velocity'] = [10, 20, 30] * u.km / u.s
# Add empty column
from astropy.table import Column
t['empty'] = Column(length=len(t), dtype=float)
# Insert at specific position
t.add_column([7, 8, 9], name='inserted', index=2)
```
### Removing Columns
```python
# Remove single column
t.remove_column('old_col')
# Remove multiple columns
t.remove_columns(['col1', 'col2'])
# Delete syntax
del t['col_name']
# Keep only specific columns
t.keep_columns(['ra', 'dec', 'mag'])
```
### Renaming Columns
```python
t.rename_column('old_name', 'new_name')
# Rename multiple
t.rename_columns(['old1', 'old2'], ['new1', 'new2'])
```
### Adding Rows
```python
# Add single row
t.add_row([1, 2.5, 'new'])
# Add row as dict
t.add_row({'ra': 10.5, 'dec': 41.2, 'mag': 18.5})
# Note: Adding rows one at a time is slow!
# Better to collect rows and create table at once
```
### Modifying Data
```python
# Modify column values
t['flux'] = t['flux'] * gain
t['mag'][t['mag'] < 0] = np.nan
# Modify single cell
t['ra'][5] = 10.5
# Modify entire row
t[0] = [new_id, new_ra, new_dec]
```
## Sorting and Filtering
### Sorting
```python
# Sort by single column
t.sort('mag')
# Sort descending
t.sort('mag', reverse=True)
# Sort by multiple columns
t.sort(['priority', 'mag'])
# Get sorted indices without modifying table
indices = t.argsort('mag')
sorted_table = t[indices]
```
### Filtering
```python
# Boolean indexing
bright = t[t['mag'] < 18]
nearby = t[t['distance'] < 100*u.pc]
# Multiple conditions
selected = t[(t['mag'] < 18) & (t['dec'] > 0)]
# Using numpy functions
high_snr = t[np.abs(t['flux'] / t['error']) > 5]
```
## Reading and Writing Files
### Supported Formats
FITS, HDF5, ASCII (CSV, ECSV, IPAC, etc.), VOTable, Parquet, ASDF
### Reading Files
```python
# Automatic format detection
t = Table.read('catalog.fits')
t = Table.read('data.csv')
t = Table.read('table.vot')
# Specify format explicitly
t = Table.read('data.txt', format='ascii')
t = Table.read('catalog.hdf5', path='/dataset/table')
# Read specific HDU from FITS
t = Table.read('file.fits', hdu=2)
```
### Writing Files
```python
# Automatic format from extension
t.write('output.fits')
t.write('output.csv')
# Specify format
t.write('output.txt', format='ascii.csv')
t.write('output.hdf5', path='/data/table', serialize_meta=True)
# Overwrite existing file
t.write('output.fits', overwrite=True)
```
### ASCII Format Options
```python
# CSV with custom delimiter
t.write('output.csv', format='ascii.csv', delimiter='|')
# Fixed-width format
t.write('output.txt', format='ascii.fixed_width')
# IPAC format
t.write('output.tbl', format='ascii.ipac')
# LaTeX table
t.write('table.tex', format='ascii.latex')
```
## Table Operations
### Stacking Tables (Vertical)
```python
from astropy.table import vstack
# Concatenate tables vertically
t1 = Table([[1, 2], [3, 4]], names=('a', 'b'))
t2 = Table([[5, 6], [7, 8]], names=('a', 'b'))
t_combined = vstack([t1, t2])
```
### Joining Tables (Horizontal)
```python
from astropy.table import hstack
# Concatenate tables horizontally
t1 = Table([[1, 2]], names=['a'])
t2 = Table([[3, 4]], names=['b'])
t_combined = hstack([t1, t2])
```
### Database-Style Joins
```python
from astropy.table import join
# Inner join on common column
t1 = Table([[1, 2, 3], ['a', 'b', 'c']], names=('id', 'data1'))
t2 = Table([[1, 2, 4], ['x', 'y', 'z']], names=('id', 'data2'))
t_joined = join(t1, t2, keys='id')
# Left/right/outer joins
t_joined = join(t1, t2, join_type='left')
t_joined = join(t1, t2, join_type='outer')
```
### Grouping and Aggregating
```python
# Group by column
g = t.group_by('filter')
# Aggregate groups
means = g.groups.aggregate(np.mean)
# Iterate over groups
for group in g.groups:
    print(f"Filter: {group['filter'][0]}")
    print(f"Mean mag: {np.mean(group['mag'])}")
```
### Unique Rows
```python
from astropy.table import unique
# Get unique rows
t_unique = unique(t, keys='id')
# Unique by multiple columns
t_unique = unique(t, keys=['ra', 'dec'])
```
## Units and Quantities
Use QTable for unit-aware operations:
```python
from astropy.table import QTable
# Create table with units
t = QTable()
t['flux'] = [1.2, 2.3, 3.4] * u.Jy
t['wavelength'] = [500, 600, 700] * u.nm
# Unit conversions
t['flux'].to(u.mJy)
t['wavelength'].to(u.angstrom)
# Calculations preserve units
t['freq'] = t['wavelength'].to(u.Hz, equivalencies=u.spectral())
```
## Masking Missing Data
```python
from astropy.table import MaskedColumn
import numpy as np
# Create masked column
flux = MaskedColumn([1.2, np.nan, 3.4], mask=[False, True, False])
t = Table([flux], names=['flux'])
# Operations automatically handle masks
mean_flux = np.ma.mean(t['flux'])
# Fill masked values
t['flux'].filled(0) # Replace masked with 0
```
## Indexing for Fast Lookup
Create indices for fast row retrieval:
```python
# Add index on column
t.add_index('id')
# Fast lookup by index
row = t.loc[12345] # Find row where id=12345
# Range queries
subset = t.loc[100:200]
```
## Table Metadata
```python
# Set table-level metadata
t.meta['TELESCOPE'] = 'HST'
t.meta['FILTER'] = 'F814W'
t.meta['EXPTIME'] = 300.0
# Set column-level metadata
t['ra'].meta['unit'] = 'deg'
t['ra'].meta['description'] = 'Right Ascension'
t['ra'].description = 'Right Ascension' # Shortcut
```
## Performance Tips
### Fast Table Construction
```python
# SLOW: Adding rows one at a time
t = Table(names=['a', 'b'])
for i in range(1000):
    t.add_row([i, i**2])
# FAST: Build from lists
rows = [(i, i**2) for i in range(1000)]
t = Table(rows=rows, names=['a', 'b'])
```
### Memory-Mapped FITS Tables
```python
# Don't load entire table into memory
t = Table.read('huge_catalog.fits', memmap=True)
# Only loads data when accessed
subset = t[10000:10100] # Efficient
```
### Copy vs. View
```python
# Create view (shares data, fast)
t_view = t['ra', 'dec']
# Create copy (independent data)
t_copy = t['ra', 'dec'].copy()
```
## Displaying Tables
```python
# Print to console
print(t)
# Show in interactive browser
t.show_in_browser()
t.show_in_browser(jsviewer=True) # Interactive sorting/filtering
# Paginated viewing
t.more()
# Custom formatting
t['flux'].format = '%.3f'
t['ra'].format = '{:.6f}'
```
## Converting to Other Formats
```python
# To NumPy array
arr = np.array(t)
# To Pandas DataFrame
df = t.to_pandas()
# To dictionary
d = {name: t[name] for name in t.colnames}
```
## Common Use Cases
### Cross-Matching Catalogs
```python
from astropy.coordinates import SkyCoord, match_coordinates_sky
import astropy.units as u
# Create coordinate objects from table columns
coords1 = SkyCoord(t1['ra'], t1['dec'], unit='deg')
coords2 = SkyCoord(t2['ra'], t2['dec'], unit='deg')
# Find matches
idx, sep, _ = coords1.match_to_catalog_sky(coords2)
# Filter by separation
max_sep = 1 * u.arcsec
matches = sep < max_sep
t1_matched = t1[matches]
t2_matched = t2[idx[matches]]
```
### Binning Data
```python
from astropy.table import Table
import numpy as np
# Bin by magnitude
mag_bins = np.arange(10, 20, 0.5)
binned = t.group_by(np.digitize(t['mag'], mag_bins))
counts = binned.groups.aggregate(len)
```

View File

@@ -0,0 +1,404 @@
# Time Handling (astropy.time)
The `astropy.time` module provides robust tools for manipulating times and dates with support for various time scales and formats.
## Creating Time Objects
### Basic Creation
```python
from astropy.time import Time
import astropy.units as u
# ISO format (automatically detected)
t = Time('2023-01-15 12:30:45')
t = Time('2023-01-15T12:30:45')
# Specify format explicitly
t = Time('2023-01-15 12:30:45', format='iso', scale='utc')
# Julian Date
t = Time(2460000.0, format='jd')
# Modified Julian Date
t = Time(59945.0, format='mjd')
# Unix time (seconds since 1970-01-01)
t = Time(1673785845.0, format='unix')
```
### Array of Times
```python
# Multiple times
times = Time(['2023-01-01', '2023-06-01', '2023-12-31'])
# From arrays
import numpy as np
jd_array = np.linspace(2460000, 2460100, 100)
times = Time(jd_array, format='jd')
```
## Time Formats
### Supported Formats
```python
# ISO 8601
t = Time('2023-01-15 12:30:45', format='iso')
t = Time('2023-01-15T12:30:45.123', format='isot')
# Julian dates
t = Time(2460000.0, format='jd') # Julian Date
t = Time(59945.0, format='mjd') # Modified Julian Date
# Decimal year
t = Time(2023.5, format='decimalyear')
t = Time(2023.5, format='jyear') # Julian year
t = Time(2023.5, format='byear') # Besselian year
# Year and day-of-year
t = Time('2023:046', format='yday') # 46th day of 2023
# FITS format
t = Time('2023-01-15T12:30:45', format='fits')
# GPS seconds
t = Time(1000000000.0, format='gps')
# Unix time
t = Time(1673785845.0, format='unix')
# Matplotlib dates
t = Time(738521.0, format='plot_date')
# datetime objects
from datetime import datetime
dt = datetime(2023, 1, 15, 12, 30, 45)
t = Time(dt)
```
## Time Scales
### Available Time Scales
```python
# UTC - Coordinated Universal Time (default)
t = Time('2023-01-15 12:00:00', scale='utc')
# TAI - International Atomic Time
t = Time('2023-01-15 12:00:00', scale='tai')
# TT - Terrestrial Time
t = Time('2023-01-15 12:00:00', scale='tt')
# TDB - Barycentric Dynamical Time
t = Time('2023-01-15 12:00:00', scale='tdb')
# TCG - Geocentric Coordinate Time
t = Time('2023-01-15 12:00:00', scale='tcg')
# TCB - Barycentric Coordinate Time
t = Time('2023-01-15 12:00:00', scale='tcb')
# UT1 - Universal Time
t = Time('2023-01-15 12:00:00', scale='ut1')
```
### Converting Time Scales
```python
t = Time('2023-01-15 12:00:00', scale='utc')
# Convert to different scales
t_tai = t.tai
t_tt = t.tt
t_tdb = t.tdb
t_ut1 = t.ut1
# Check the offset: TAI reads ahead of UTC by the accumulated leap seconds
print(t.utc.iso)  # 2023-01-15 12:00:00.000
print(t.tai.iso)  # 2023-01-15 12:00:37.000  (TAI - UTC = 37 s)
```
## Format Conversions
### Change Output Format
```python
t = Time('2023-01-15 12:30:45')
# Access in different formats
print(t.jd) # Julian Date
print(t.mjd) # Modified Julian Date
print(t.iso) # ISO format
print(t.isot) # ISO with 'T'
print(t.unix) # Unix time
print(t.decimalyear) # Decimal year
# Change default format
t.format = 'mjd'
print(t) # Displays as MJD
```
### High-Precision Output
```python
# Use subfmt for precision control
t.to_value('mjd', subfmt='float') # Standard float
t.to_value('mjd', subfmt='long') # Extended precision
t.to_value('mjd', subfmt='decimal') # Decimal (highest precision)
t.to_value('mjd', subfmt='str') # String representation
```
## Time Arithmetic
### TimeDelta Objects
```python
from astropy.time import TimeDelta
# Create time difference
dt = TimeDelta(1.0, format='jd') # 1 day
dt = TimeDelta(3600.0, format='sec') # 1 hour
# Subtract times
t1 = Time('2023-01-15')
t2 = Time('2023-02-15')
dt = t2 - t1
print(dt.jd) # 31 days
print(dt.sec) # 2678400 seconds
```
### Adding/Subtracting Time
```python
t = Time('2023-01-15 12:00:00')
# Add TimeDelta
t_future = t + TimeDelta(7, format='jd') # Add 7 days
# Add Quantity
t_future = t + 1*u.hour
t_future = t + 30*u.day
t_future = t + 1*u.year
# Subtract
t_past = t - 1*u.week
```
### Time Ranges
```python
# Create range of times
start = Time('2023-01-01')
end = Time('2023-12-31')
times = start + np.linspace(0, 365, 100) * u.day
# Or using TimeDelta
times = start + TimeDelta(np.linspace(0, 365, 100), format='jd')
```
## Observing-Related Features
### Sidereal Time
```python
from astropy.coordinates import EarthLocation
# Define observer location
location = EarthLocation(lat=40*u.deg, lon=-120*u.deg, height=1000*u.m)
# Create time with location
t = Time('2023-06-15 23:00:00', location=location)
# Calculate sidereal time
lst_apparent = t.sidereal_time('apparent')
lst_mean = t.sidereal_time('mean')
print(f"Local Sidereal Time: {lst_apparent}")
```
### Light Travel Time Corrections
```python
from astropy.coordinates import SkyCoord, EarthLocation
# Define target and observer
target = SkyCoord(ra=10*u.deg, dec=20*u.deg)
location = EarthLocation.of_site('Keck Observatory')
# Observation times
times = Time(['2023-01-01', '2023-06-01', '2023-12-31'],
location=location)
# Calculate light travel time to solar system barycenter
ltt_bary = times.light_travel_time(target, kind='barycentric')
ltt_helio = times.light_travel_time(target, kind='heliocentric')
# Apply correction
times_barycentric = times.tdb + ltt_bary
```
### Earth Rotation Angle
```python
# Earth rotation angle (for celestial to terrestrial transformations)
era = t.earth_rotation_angle()
```
## Handling Missing or Invalid Times
### Masked Times
```python
import numpy as np
# Create times with missing values
times = Time(['2023-01-01', '2023-06-01', '2023-12-31'])
times[1] = np.ma.masked # Mark as missing
# Check for masks
print(times.mask) # [False True False]
# Get unmasked version
times_clean = times.unmasked
# Fill masked values
times_filled = times.filled(Time('2000-01-01'))
```
## Time Precision and Representation
### Internal Representation
Time objects use two 64-bit floats (jd1, jd2) for high precision:
```python
t = Time('2023-01-15 12:30:45.123456789', format='iso', scale='utc')
# Access internal representation
print(t.jd1, t.jd2) # Integer and fractional parts
# This allows sub-nanosecond precision over astronomical timescales
```
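To preserve full precision on input, the two parts can be supplied explicitly through the optional second value argument (the split below is illustrative):
```python
from astropy.time import Time

# The first argument carries the large part, the second the small remainder
t = Time(2460000.0, 0.123456789012, format='jd', scale='tdb')
print(t.jd1, t.jd2)
```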
### Precision
```python
# High precision for long time intervals
t1 = Time('1900-01-01')
t2 = Time('2100-01-01')
dt = t2 - t1
print(f"Time span: {dt.sec / (365.25 * 86400)} years")
# Maintains precision throughout
```
## Time Formatting
### Custom String Format
```python
t = Time('2023-01-15 12:30:45')
# Strftime-style formatting
t.strftime('%Y-%m-%d %H:%M:%S') # '2023-01-15 12:30:45'
t.strftime('%B %d, %Y') # 'January 15, 2023'
# ISO format subformats
t.iso # '2023-01-15 12:30:45.000'
t.isot # '2023-01-15T12:30:45.000'
t.to_value('iso', subfmt='date_hms') # '2023-01-15 12:30:45.000'
```
## Common Use Cases
### Converting Between Formats
```python
# MJD to ISO
t_mjd = Time(59945.0, format='mjd')
iso_string = t_mjd.iso
# ISO to JD
t_iso = Time('2023-01-15 12:00:00')
jd_value = t_iso.jd
# Unix to ISO
t_unix = Time(1673785845.0, format='unix')
iso_string = t_unix.iso
```
### Time Differences in Various Units
```python
t1 = Time('2023-01-01')
t2 = Time('2023-12-31')
dt = t2 - t1
print(f"Days: {dt.to(u.day)}")
print(f"Hours: {dt.to(u.hour)}")
print(f"Seconds: {dt.sec}")
print(f"Years: {dt.to(u.year)}")
```
### Creating Regular Time Series
```python
# Daily observations for a year
start = Time('2023-01-01')
times = start + np.arange(365) * u.day
# Hourly observations for a day
start = Time('2023-01-15 00:00:00')
times = start + np.arange(24) * u.hour
# Observations every 30 seconds
start = Time('2023-01-15 12:00:00')
times = start + np.arange(1000) * 30 * u.second
```
### Time Zone Handling
```python
# UTC to local time (requires datetime)
t = Time('2023-01-15 12:00:00', scale='utc')
dt_utc = t.to_datetime()
# Convert to specific timezone using pytz
import pytz
eastern = pytz.timezone('US/Eastern')
dt_eastern = dt_utc.replace(tzinfo=pytz.utc).astimezone(eastern)
```
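On Python 3.9+ the standard-library `zoneinfo` module works as well; a minimal sketch:
```python
from datetime import timezone
from zoneinfo import ZoneInfo
from astropy.time import Time

t = Time('2023-01-15 12:00:00', scale='utc')
dt_utc = t.to_datetime(timezone=timezone.utc)        # timezone-aware datetime
dt_eastern = dt_utc.astimezone(ZoneInfo('US/Eastern'))
```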
### Barycentric Correction Example
```python
from astropy.coordinates import SkyCoord, EarthLocation
# Target coordinates
target = SkyCoord(ra='23h23m08.55s', dec='+18d24m59.3s')
# Observatory location
location = EarthLocation.of_site('Keck Observatory')
# Observation times (must include location)
times = Time(['2023-01-15 08:30:00', '2023-01-16 08:30:00'],
location=location)
# Calculate barycentric correction
ltt_bary = times.light_travel_time(target, kind='barycentric')
# Apply correction to get barycentric times
times_bary = times.tdb + ltt_bary
# For the barycentric radial velocity correction, use SkyCoord
rv_corr = target.radial_velocity_correction(kind='barycentric',
                                            obstime=times, location=location)
print(rv_corr.to(u.km / u.s))
```
## Performance Considerations
1. **Array operations are fast**: Process multiple times as arrays
2. **Format conversions are cached**: Repeated access is efficient
3. **Scale conversions may require IERS data**: Downloads automatically; see the configuration sketch below
4. **High precision maintained**: Sub-nanosecond accuracy across astronomical timescales
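A minimal sketch of controlling the IERS auto-download behavior mentioned in point 3 (useful for offline or reproducible runs):
```python
from astropy.utils import iers

# Disable automatic downloads and fall back to the bundled IERS table
iers.conf.auto_download = False

# Or keep downloads enabled but never reject a cached table as too old
iers.conf.auto_max_age = None
```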

View File

@@ -0,0 +1,178 @@
# Units and Quantities (astropy.units)
The `astropy.units` module handles defining, converting between, and performing arithmetic with physical quantities.
## Creating Quantities
Multiply or divide numeric values by built-in units to create Quantity objects:
```python
from astropy import units as u
import numpy as np
# Scalar quantities
distance = 42.0 * u.meter
velocity = 100 * u.km / u.s
# Array quantities
distances = np.array([1., 2., 3.]) * u.m
wavelengths = [500, 600, 700] * u.nm
```
Access components via `.value` and `.unit` attributes:
```python
distance.value # 42.0
distance.unit # Unit("m")
```
## Unit Conversions
Use `.to()` method for conversions:
```python
distance = 1.0 * u.parsec
distance.to(u.km) # <Quantity 30856775814671.914 km>
wavelength = 500 * u.nm
wavelength.to(u.angstrom) # <Quantity 5000. Angstrom>
```
## Arithmetic Operations
Quantities support standard arithmetic with automatic unit management:
```python
# Basic operations
speed = 15.1 * u.meter / (32.0 * u.second) # <Quantity 0.471875 m / s>
area = (5 * u.m) * (3 * u.m) # <Quantity 15. m2>
# Units cancel when appropriate
ratio = (10 * u.m) / (5 * u.m) # <Quantity 2. (dimensionless)>
# Decompose complex units
time = (3.0 * u.kilometer / (130.51 * u.meter / u.second))
time.decompose() # <Quantity 22.986744310780782 s>
```
## Unit Systems
Convert between major unit systems:
```python
# SI to CGS
pressure = 1.0 * u.Pa
pressure.cgs # <Quantity 10. Ba>
# Find equivalent representations
(u.s ** -1).compose() # [Unit("Bq"), Unit("Hz"), ...]
```
## Equivalencies
Domain-specific conversions require equivalencies:
```python
# Spectral equivalency (wavelength ↔ frequency)
wavelength = 1000 * u.nm
wavelength.to(u.Hz, equivalencies=u.spectral())
# <Quantity 2.99792458e+14 Hz>
# Doppler equivalencies
velocity = 1000 * u.km / u.s
velocity.to(u.Hz, equivalencies=u.doppler_optical(500*u.nm))
# Other equivalencies
u.brightness_temperature(500*u.GHz)
u.doppler_radio(1.4*u.GHz)
u.mass_energy()
u.parallax()
```
## Logarithmic Units
Special units for magnitudes, decibels, and dex:
```python
# Magnitudes
flux = -2.5 * u.mag(u.ct / u.s)
# Decibels
power_ratio = 3 * u.dB(u.W)
# Dex (base-10 logarithm)
abundance = 8.5 * u.dex(u.cm**-3)
```
## Common Units
### Length
`u.m, u.km, u.cm, u.mm, u.micron, u.angstrom, u.au, u.pc, u.kpc, u.Mpc, u.lyr`
### Time
`u.s, u.min, u.hour, u.day, u.year, u.Myr, u.Gyr`
### Mass
`u.kg, u.g, u.M_sun, u.M_earth, u.M_jup`
### Temperature
`u.K, u.deg_C`
### Angle
`u.deg, u.arcmin, u.arcsec, u.rad, u.hourangle, u.mas`
### Energy/Power
`u.J, u.erg, u.eV, u.keV, u.MeV, u.GeV, u.W, u.L_sun`
### Frequency
`u.Hz, u.kHz, u.MHz, u.GHz`
### Flux
`u.Jy, u.mJy, u.erg / u.s / u.cm**2`
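Flux-density units convert directly when the dimensions match; switching between per-frequency and per-wavelength densities needs `u.spectral_density` (the 550 nm pivot below is only an example):
```python
from astropy import units as u

# Jansky to CGS flux density (same dimensions, direct conversion)
f_cgs = (1.0 * u.Jy).to(u.erg / u.s / u.cm**2 / u.Hz)

# f_nu <-> f_lambda requires a reference wavelength (or frequency)
f_nu = 3.0 * u.Jy
f_lam = f_nu.to(u.erg / u.s / u.cm**2 / u.AA,
                equivalencies=u.spectral_density(550 * u.nm))
```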
## Performance Optimization
Pre-compute composite units for array operations:
```python
# Slow (creates intermediate quantities)
result = array * u.m / u.s / u.kg / u.sr
# Fast (pre-computed composite unit)
UNIT_COMPOSITE = u.m / u.s / u.kg / u.sr
result = array * UNIT_COMPOSITE
# Fastest (attach the unit with <<, which avoids copying the data)
result = array << UNIT_COMPOSITE
```
## String Formatting
Format quantities with standard Python syntax:
```python
velocity = 15.1 * u.meter / (32.0 * u.second)
f"{velocity:0.03f}" # '0.472 m / s'
f"{velocity:.2e}" # '4.72e-01 m / s'
f"{velocity.unit:FITS}" # 'm s-1'
```
## Defining Custom Units
```python
# Create new unit
bakers_fortnight = u.def_unit('bakers_fortnight', 13 * u.day)
# Enable in string parsing
u.add_enabled_units([bakers_fortnight])
```
## Constants
Access physical constants with units:
```python
from astropy.constants import c, G, M_sun, h, k_B
speed_of_light = c.to(u.km/u.s)
gravitational_constant = G.to(u.m**3 / u.kg / u.s**2)
```

View File

@@ -0,0 +1,373 @@
# WCS and Other Astropy Modules
## World Coordinate System (astropy.wcs)
The WCS module manages transformations between pixel coordinates in images and world coordinates (e.g., celestial coordinates).
### Reading WCS from FITS
```python
from astropy.wcs import WCS
from astropy.io import fits
# Read WCS from FITS header
with fits.open('image.fits') as hdul:
wcs = WCS(hdul[0].header)
```
### Pixel to World Transformations
```python
# Single pixel to world coordinates
world = wcs.pixel_to_world(100, 200) # Returns SkyCoord
print(f"RA: {world.ra}, Dec: {world.dec}")
# Arrays of pixels
import numpy as np
x_pixels = np.array([100, 200, 300])
y_pixels = np.array([150, 250, 350])
world_coords = wcs.pixel_to_world(x_pixels, y_pixels)
```
### World to Pixel Transformations
```python
from astropy.coordinates import SkyCoord
import astropy.units as u
# Single coordinate
coord = SkyCoord(ra=10.5*u.degree, dec=41.2*u.degree)
x, y = wcs.world_to_pixel(coord)
# Array of coordinates
coords = SkyCoord(ra=[10, 11, 12]*u.degree, dec=[41, 42, 43]*u.degree)
x_pixels, y_pixels = wcs.world_to_pixel(coords)
```
### WCS Information
```python
# Print WCS details
print(wcs)
# Access key properties
print(wcs.wcs.crpix)           # Reference pixel
print(wcs.wcs.crval)           # Reference value (world coords)
print(wcs.wcs.ctype)           # Coordinate types
print(wcs.pixel_scale_matrix)  # Linear transformation (from CD or PC/CDELT)
# Pixel scale along each axis
from astropy.wcs.utils import proj_plane_pixel_scales
pixel_scale = proj_plane_pixel_scales(wcs)  # in the WCS's angular units (typically deg/pixel)
```
### Creating WCS
```python
from astropy.wcs import WCS
# Create new WCS
wcs = WCS(naxis=2)
wcs.wcs.crpix = [512.0, 512.0] # Reference pixel
wcs.wcs.crval = [10.5, 41.2] # RA, Dec at reference pixel
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN'] # Projection type
wcs.wcs.cdelt = [-0.0001, 0.0001] # Pixel scale (degrees/pixel)
wcs.wcs.cunit = ['deg', 'deg']
```
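The constructed WCS can be written back out as FITS keywords; a minimal sketch (the output filename is arbitrary):
```python
from astropy.io import fits

# Serialize the WCS to a header and attach it to a new HDU
header = wcs.to_header()
hdu = fits.PrimaryHDU(header=header)
hdu.writeto('wcs_only.fits', overwrite=True)
```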
### Footprint and Coverage
```python
# Calculate image footprint (corner coordinates)
footprint = wcs.calc_footprint()
# Returns array of [RA, Dec] for each corner
```
## NDData (astropy.nddata)
Container for n-dimensional datasets with metadata, uncertainty, and masking.
### Creating NDData
```python
from astropy.nddata import NDData
import numpy as np
import astropy.units as u
# Basic NDData
data = np.random.random((100, 100))
ndd = NDData(data)
# With units
ndd = NDData(data, unit=u.electron/u.s)
# With uncertainty
from astropy.nddata import StdDevUncertainty
uncertainty = StdDevUncertainty(np.sqrt(data))
ndd = NDData(data, uncertainty=uncertainty, unit=u.electron/u.s)
# With mask
mask = data < 0.1 # Mask low values
ndd = NDData(data, mask=mask)
# With WCS
from astropy.wcs import WCS
ndd = NDData(data, wcs=wcs)
```
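For extracting sub-images with a consistently updated WCS, `Cutout2D` from the same module is convenient; a minimal sketch assuming `data` and `wcs` from above:
```python
from astropy.nddata import Cutout2D

# 20x20 pixel cutout centered at pixel (x, y) = (50, 50);
# a SkyCoord can be used as the position when a WCS is supplied
cutout = Cutout2D(data, position=(50, 50), size=(20, 20), wcs=wcs)
print(cutout.data.shape)   # (20, 20)
print(cutout.wcs)          # WCS adjusted to the cutout
```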
### CCDData for CCD Images
```python
from astropy.nddata import CCDData
# Create CCDData
ccd = CCDData(data, unit=u.adu, meta={'object': 'M31'})
# Read from FITS
ccd = CCDData.read('image.fits', unit=u.adu)
# Write to FITS
ccd.write('output.fits', overwrite=True)
```
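CCDData also supports arithmetic with uncertainty propagation through the NDData arithmetic mixin; a minimal sketch with a hypothetical bias frame:
```python
from astropy.nddata import CCDData, StdDevUncertainty
import numpy as np
import astropy.units as u

science = CCDData(np.random.random((100, 100)), unit=u.adu,
                  uncertainty=StdDevUncertainty(np.full((100, 100), 0.10)))
bias = CCDData(np.full((100, 100), 0.02), unit=u.adu,
               uncertainty=StdDevUncertainty(np.full((100, 100), 0.01)))

# Standard-deviation uncertainties are propagated in quadrature
debiased = science.subtract(bias)
```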
## Modeling (astropy.modeling)
Framework for creating and fitting models to data.
### Common Models
```python
from astropy.modeling import models, fitting
import numpy as np
# 1D Gaussian
gauss = models.Gaussian1D(amplitude=10, mean=5, stddev=1)
x = np.linspace(0, 10, 100)
y = gauss(x)
# 2D Gaussian
gauss_2d = models.Gaussian2D(amplitude=10, x_mean=50, y_mean=50,
x_stddev=5, y_stddev=3)
# Polynomial
poly = models.Polynomial1D(degree=3)
# Power law
power_law = models.PowerLaw1D(amplitude=10, x_0=1, alpha=2)
```
### Fitting Models to Data
```python
# Generate noisy data
true_model = models.Gaussian1D(amplitude=10, mean=5, stddev=1)
x = np.linspace(0, 10, 100)
y_true = true_model(x)
y_noisy = y_true + np.random.normal(0, 0.5, x.shape)
# Fit model
fitter = fitting.LevMarLSQFitter()
initial_model = models.Gaussian1D(amplitude=8, mean=4, stddev=1.5)
fitted_model = fitter(initial_model, x, y_noisy)
print(f"Fitted amplitude: {fitted_model.amplitude.value}")
print(f"Fitted mean: {fitted_model.mean.value}")
print(f"Fitted stddev: {fitted_model.stddev.value}")
```
### Compound Models
```python
# Add models
double_gauss = models.Gaussian1D(amplitude=5, mean=3, stddev=1) + \
               models.Gaussian1D(amplitude=8, mean=7, stddev=1.5)
# Compose models (the output of the first feeds the second)
composite = models.Gaussian1D(amplitude=10, mean=5, stddev=1) | \
            models.Scale(factor=2)  # Scale the output by 2
```
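Parameters can be held fixed or bounded before fitting; a minimal sketch:
```python
from astropy.modeling import models, fitting
import numpy as np

init = models.Gaussian1D(amplitude=8, mean=4, stddev=1.5)
init.mean.fixed = True              # hold the mean at its initial value
init.amplitude.bounds = (0, None)   # keep the amplitude non-negative

x = np.linspace(0, 10, 100)
y = models.Gaussian1D(amplitude=10, mean=4, stddev=1)(x)
y += np.random.normal(0, 0.5, x.size)

fitted = fitting.LevMarLSQFitter()(init, x, y)
```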
## Visualization (astropy.visualization)
Tools for visualizing astronomical images and data.
### Image Normalization
```python
from astropy.visualization import simple_norm
import matplotlib.pyplot as plt
# Load image
from astropy.io import fits
data = fits.getdata('image.fits')
# Normalize for display
norm = simple_norm(data, 'sqrt', percent=99)
# Display
plt.imshow(data, norm=norm, cmap='gray', origin='lower')
plt.colorbar()
plt.show()
```
### Stretching and Intervals
```python
from astropy.visualization import (MinMaxInterval, AsinhStretch,
ImageNormalize, ZScaleInterval)
# Z-scale interval
interval = ZScaleInterval()
vmin, vmax = interval.get_limits(data)
# Asinh stretch
stretch = AsinhStretch()
norm = ImageNormalize(data, interval=interval, stretch=stretch)
plt.imshow(data, norm=norm, cmap='gray', origin='lower')
```
### PercentileInterval
```python
from astropy.visualization import PercentileInterval
# Show data between 5th and 95th percentiles
interval = PercentileInterval(90) # 90% of data
vmin, vmax = interval.get_limits(data)
plt.imshow(data, vmin=vmin, vmax=vmax, cmap='gray', origin='lower')
```
## Constants (astropy.constants)
Physical and astronomical constants with units.
```python
from astropy import constants as const
import astropy.units as u
# Speed of light
c = const.c
print(f"c = {c}")
print(f"c in km/s = {c.to(u.km/u.s)}")
# Gravitational constant
G = const.G
# Astronomical constants
M_sun = const.M_sun # Solar mass
R_sun = const.R_sun # Solar radius
L_sun = const.L_sun # Solar luminosity
au = const.au # Astronomical unit
pc = const.pc # Parsec
# Fundamental constants
h = const.h # Planck constant
hbar = const.hbar # Reduced Planck constant
k_B = const.k_B # Boltzmann constant
m_e = const.m_e # Electron mass
m_p = const.m_p # Proton mass
e = const.e # Elementary charge
N_A = const.N_A # Avogadro constant
```
### Using Constants in Calculations
```python
from astropy import constants as const
import numpy as np
import astropy.units as u
# Calculate Schwarzschild radius
M = 10 * const.M_sun
r_s = 2 * const.G * M / const.c**2
print(f"Schwarzschild radius: {r_s.to(u.km)}")
# Calculate escape velocity
M = const.M_earth
R = const.R_earth
v_esc = np.sqrt(2 * const.G * M / R)
print(f"Earth escape velocity: {v_esc.to(u.km/u.s)}")
```
## Convolution (astropy.convolution)
Convolution kernels for image processing.
```python
from astropy.convolution import Gaussian2DKernel, convolve
# Create Gaussian kernel
kernel = Gaussian2DKernel(x_stddev=2)
# Convolve image
smoothed_image = convolve(data, kernel)
# Handle NaNs
from astropy.convolution import convolve_fft
smoothed = convolve_fft(data, kernel, nan_treatment='interpolate')
```
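To fill only the NaN pixels while leaving valid data untouched, `interpolate_replace_nans` can be used; a minimal sketch:
```python
from astropy.convolution import Gaussian2DKernel, interpolate_replace_nans

# NaNs are replaced by a kernel-weighted interpolation of neighboring pixels
kernel = Gaussian2DKernel(x_stddev=2)
filled = interpolate_replace_nans(data, kernel)
```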
## Stats (astropy.stats)
Statistical functions for astronomical data.
```python
from astropy.stats import sigma_clip, sigma_clipped_stats
# Sigma clipping
clipped_data = sigma_clip(data, sigma=3, maxiters=5)
# Get statistics with sigma clipping
mean, median, std = sigma_clipped_stats(data, sigma=3.0)
# Robust statistics
from astropy.stats import mad_std, biweight_location, biweight_scale
robust_std = mad_std(data)
robust_mean = biweight_location(data)
robust_scale = biweight_scale(data)
```
## Utils
### Data Downloads
```python
from astropy.utils.data import download_file
# Download file (caches locally)
url = 'https://example.com/data.fits'
local_file = download_file(url, cache=True)
```
### Progress Bars
```python
from astropy.utils.console import ProgressBar
with ProgressBar(len(data_list)) as bar:
for item in data_list:
# Process item
bar.update()
```
## SAMP (Simple Application Messaging Protocol)
Interoperability with other astronomy tools.
```python
from astropy.samp import SAMPIntegratedClient
# Connect to SAMP hub
client = SAMPIntegratedClient()
client.connect()
# Broadcast table to other applications
message = {
'samp.mtype': 'table.load.votable',
'samp.params': {
'url': 'file:///path/to/table.xml',
'table-id': 'my_table',
'name': 'My Catalog'
}
}
client.notify_all(message)
# Disconnect
client.disconnect()
```

View File

@@ -1,226 +0,0 @@
#!/usr/bin/env python3
"""
Coordinate conversion utility for astronomical coordinates.
This script provides batch coordinate transformations between different
astronomical coordinate systems using astropy.
"""
import sys
import argparse
from astropy.coordinates import SkyCoord
import astropy.units as u
def convert_coordinates(coords_input, input_frame='icrs', output_frame='galactic',
input_format='decimal', output_format='decimal'):
"""
Convert astronomical coordinates between different frames.
Parameters
----------
coords_input : list of tuples or str
Input coordinates as (lon, lat) pairs or strings
input_frame : str
Input coordinate frame (icrs, fk5, galactic, etc.)
output_frame : str
Output coordinate frame
input_format : str
Format of input coordinates ('decimal', 'sexagesimal', 'hourangle')
output_format : str
Format for output display ('decimal', 'sexagesimal', 'hourangle')
Returns
-------
list
Converted coordinates
"""
results = []
for coord in coords_input:
try:
# Parse input coordinate
if input_format == 'decimal':
if isinstance(coord, str):
parts = coord.split()
lon, lat = float(parts[0]), float(parts[1])
else:
lon, lat = coord
c = SkyCoord(lon*u.degree, lat*u.degree, frame=input_frame)
elif input_format == 'sexagesimal':
c = SkyCoord(coord, frame=input_frame, unit=(u.hourangle, u.deg))
elif input_format == 'hourangle':
if isinstance(coord, str):
parts = coord.split()
lon, lat = parts[0], parts[1]
else:
lon, lat = coord
c = SkyCoord(lon, lat, frame=input_frame, unit=(u.hourangle, u.deg))
# Transform to output frame
if output_frame == 'icrs':
c_out = c.icrs
elif output_frame == 'fk5':
c_out = c.fk5
elif output_frame == 'fk4':
c_out = c.fk4
elif output_frame == 'galactic':
c_out = c.galactic
elif output_frame == 'supergalactic':
c_out = c.supergalactic
else:
c_out = c.transform_to(output_frame)
results.append(c_out)
except Exception as e:
print(f"Error converting coordinate {coord}: {e}", file=sys.stderr)
results.append(None)
return results
def format_output(coords, frame, output_format='decimal'):
"""Format coordinates for display."""
output = []
for c in coords:
if c is None:
output.append("ERROR")
continue
if frame in ['icrs', 'fk5', 'fk4']:
lon_name, lat_name = 'RA', 'Dec'
lon = c.ra
lat = c.dec
elif frame == 'galactic':
lon_name, lat_name = 'l', 'b'
lon = c.l
lat = c.b
elif frame == 'supergalactic':
lon_name, lat_name = 'sgl', 'sgb'
lon = c.sgl
lat = c.sgb
else:
lon_name, lat_name = 'lon', 'lat'
lon = c.spherical.lon
lat = c.spherical.lat
if output_format == 'decimal':
out_str = f"{lon.degree:12.8f} {lat.degree:+12.8f}"
elif output_format == 'sexagesimal':
if frame in ['icrs', 'fk5', 'fk4']:
out_str = f"{lon.to_string(unit=u.hourangle, sep=':', pad=True)} "
out_str += f"{lat.to_string(unit=u.degree, sep=':', pad=True)}"
else:
out_str = f"{lon.to_string(unit=u.degree, sep=':', pad=True)} "
out_str += f"{lat.to_string(unit=u.degree, sep=':', pad=True)}"
elif output_format == 'hourangle':
out_str = f"{lon.to_string(unit=u.hourangle, sep=' ', pad=True)} "
out_str += f"{lat.to_string(unit=u.degree, sep=' ', pad=True)}"
output.append(out_str)
return output
def main():
"""Main function for command-line usage."""
parser = argparse.ArgumentParser(
description='Convert astronomical coordinates between different frames',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Supported frames: icrs, fk5, fk4, galactic, supergalactic
Input formats:
decimal : Degrees (e.g., "10.68 41.27")
sexagesimal : HMS/DMS (e.g., "00:42:44.3 +41:16:09")
hourangle : Hours and degrees (e.g., "10.5h 41.5d")
Examples:
%(prog)s --from icrs --to galactic "10.68 41.27"
%(prog)s --from icrs --to galactic --input decimal --output sexagesimal "150.5 -30.2"
%(prog)s --from galactic --to icrs "120.5 45.3"
%(prog)s --file coords.txt --from icrs --to galactic
"""
)
parser.add_argument('coordinates', nargs='*',
help='Coordinates to convert (lon lat pairs)')
parser.add_argument('-f', '--from', dest='input_frame', default='icrs',
help='Input coordinate frame (default: icrs)')
parser.add_argument('-t', '--to', dest='output_frame', default='galactic',
help='Output coordinate frame (default: galactic)')
parser.add_argument('-i', '--input', dest='input_format', default='decimal',
choices=['decimal', 'sexagesimal', 'hourangle'],
help='Input format (default: decimal)')
parser.add_argument('-o', '--output', dest='output_format', default='decimal',
choices=['decimal', 'sexagesimal', 'hourangle'],
help='Output format (default: decimal)')
parser.add_argument('--file', dest='input_file',
help='Read coordinates from file (one per line)')
parser.add_argument('--header', action='store_true',
help='Print header line with coordinate names')
args = parser.parse_args()
# Get coordinates from file or command line
if args.input_file:
try:
with open(args.input_file, 'r') as f:
coords = [line.strip() for line in f if line.strip()]
except FileNotFoundError:
print(f"Error: File '{args.input_file}' not found.", file=sys.stderr)
sys.exit(1)
else:
if not args.coordinates:
print("Error: No coordinates provided.", file=sys.stderr)
parser.print_help()
sys.exit(1)
# Combine pairs of arguments
if args.input_format == 'decimal':
coords = []
i = 0
while i < len(args.coordinates):
if i + 1 < len(args.coordinates):
coords.append(f"{args.coordinates[i]} {args.coordinates[i+1]}")
i += 2
else:
print(f"Warning: Odd number of coordinates, skipping last value",
file=sys.stderr)
break
else:
coords = args.coordinates
# Convert coordinates
converted = convert_coordinates(coords,
input_frame=args.input_frame,
output_frame=args.output_frame,
input_format=args.input_format,
output_format=args.output_format)
# Format and print output
formatted = format_output(converted, args.output_frame, args.output_format)
# Print header if requested
if args.header:
if args.output_frame in ['icrs', 'fk5', 'fk4']:
if args.output_format == 'decimal':
print(f"{'RA (deg)':>12s} {'Dec (deg)':>13s}")
else:
print(f"{'RA':>25s} {'Dec':>26s}")
elif args.output_frame == 'galactic':
if args.output_format == 'decimal':
print(f"{'l (deg)':>12s} {'b (deg)':>13s}")
else:
print(f"{'l':>25s} {'b':>26s}")
for line in formatted:
print(line)
if __name__ == '__main__':
main()

View File

@@ -1,250 +0,0 @@
#!/usr/bin/env python3
"""
Cosmological calculator using astropy.cosmology.
This script provides quick calculations of cosmological distances,
ages, and other quantities for given redshifts.
"""
import sys
import argparse
import numpy as np
from astropy.cosmology import FlatLambdaCDM, Planck18, Planck15, WMAP9
import astropy.units as u
def calculate_cosmology(redshifts, cosmology='Planck18', H0=None, Om0=None):
"""
Calculate cosmological quantities for given redshifts.
Parameters
----------
redshifts : array-like
Redshift values
cosmology : str
Cosmology to use ('Planck18', 'Planck15', 'WMAP9', 'custom')
H0 : float, optional
Hubble constant for custom cosmology (km/s/Mpc)
Om0 : float, optional
Matter density parameter for custom cosmology
Returns
-------
dict
Dictionary containing calculated quantities
"""
# Select cosmology
if cosmology == 'Planck18':
cosmo = Planck18
elif cosmology == 'Planck15':
cosmo = Planck15
elif cosmology == 'WMAP9':
cosmo = WMAP9
elif cosmology == 'custom':
if H0 is None or Om0 is None:
raise ValueError("Must provide H0 and Om0 for custom cosmology")
cosmo = FlatLambdaCDM(H0=H0 * u.km/u.s/u.Mpc, Om0=Om0)
else:
raise ValueError(f"Unknown cosmology: {cosmology}")
z = np.atleast_1d(redshifts)
results = {
'redshift': z,
'cosmology': str(cosmo),
'luminosity_distance': cosmo.luminosity_distance(z),
'angular_diameter_distance': cosmo.angular_diameter_distance(z),
'comoving_distance': cosmo.comoving_distance(z),
'comoving_volume': cosmo.comoving_volume(z),
'age': cosmo.age(z),
'lookback_time': cosmo.lookback_time(z),
'H': cosmo.H(z),
'scale_factor': 1.0 / (1.0 + z)
}
return results, cosmo
def print_results(results, verbose=False, csv=False):
"""Print calculation results."""
z = results['redshift']
if csv:
# CSV output
print("z,D_L(Mpc),D_A(Mpc),D_C(Mpc),Age(Gyr),t_lookback(Gyr),H(km/s/Mpc)")
for i in range(len(z)):
print(f"{z[i]:.6f},"
f"{results['luminosity_distance'][i].value:.6f},"
f"{results['angular_diameter_distance'][i].value:.6f},"
f"{results['comoving_distance'][i].value:.6f},"
f"{results['age'][i].value:.6f},"
f"{results['lookback_time'][i].value:.6f},"
f"{results['H'][i].value:.6f}")
else:
# Formatted table output
if verbose:
print(f"\nCosmology: {results['cosmology']}")
print("-" * 80)
print(f"\n{'z':>8s} {'D_L':>12s} {'D_A':>12s} {'D_C':>12s} "
f"{'Age':>10s} {'t_lb':>10s} {'H(z)':>10s}")
print(f"{'':>8s} {'(Mpc)':>12s} {'(Mpc)':>12s} {'(Mpc)':>12s} "
f"{'(Gyr)':>10s} {'(Gyr)':>10s} {'(km/s/Mpc)':>10s}")
print("-" * 80)
for i in range(len(z)):
print(f"{z[i]:8.4f} "
f"{results['luminosity_distance'][i].value:12.3f} "
f"{results['angular_diameter_distance'][i].value:12.3f} "
f"{results['comoving_distance'][i].value:12.3f} "
f"{results['age'][i].value:10.4f} "
f"{results['lookback_time'][i].value:10.4f} "
f"{results['H'][i].value:10.4f}")
if verbose:
print("\nLegend:")
print(" z : Redshift")
print(" D_L : Luminosity distance")
print(" D_A : Angular diameter distance")
print(" D_C : Comoving distance")
print(" Age : Age of universe at z")
print(" t_lb : Lookback time to z")
print(" H(z) : Hubble parameter at z")
def convert_quantity(value, quantity_type, cosmo, to_redshift=False):
"""
Convert between redshift and cosmological quantity.
Parameters
----------
value : float
Value to convert
quantity_type : str
Type of quantity ('luminosity_distance', 'age', etc.)
cosmo : Cosmology
Cosmology object
to_redshift : bool
If True, convert quantity to redshift; else convert z to quantity
"""
from astropy.cosmology import z_at_value
if to_redshift:
# Convert quantity to redshift
if quantity_type == 'luminosity_distance':
z = z_at_value(cosmo.luminosity_distance, value * u.Mpc)
elif quantity_type == 'age':
z = z_at_value(cosmo.age, value * u.Gyr)
elif quantity_type == 'lookback_time':
z = z_at_value(cosmo.lookback_time, value * u.Gyr)
elif quantity_type == 'comoving_distance':
z = z_at_value(cosmo.comoving_distance, value * u.Mpc)
else:
raise ValueError(f"Unknown quantity type: {quantity_type}")
return z
else:
# Convert redshift to quantity
if quantity_type == 'luminosity_distance':
return cosmo.luminosity_distance(value)
elif quantity_type == 'age':
return cosmo.age(value)
elif quantity_type == 'lookback_time':
return cosmo.lookback_time(value)
elif quantity_type == 'comoving_distance':
return cosmo.comoving_distance(value)
else:
raise ValueError(f"Unknown quantity type: {quantity_type}")
def main():
"""Main function for command-line usage."""
parser = argparse.ArgumentParser(
description='Calculate cosmological quantities for given redshifts',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Available cosmologies: Planck18, Planck15, WMAP9, custom
Examples:
%(prog)s 0.5 1.0 1.5
%(prog)s 0.5 --cosmology Planck15
%(prog)s 0.5 --cosmology custom --H0 70 --Om0 0.3
%(prog)s --range 0 3 0.5
%(prog)s 0.5 --verbose
%(prog)s 0.5 1.0 --csv
%(prog)s --convert 1000 --from luminosity_distance --cosmology Planck18
"""
)
parser.add_argument('redshifts', nargs='*', type=float,
help='Redshift values to calculate')
parser.add_argument('-c', '--cosmology', default='Planck18',
choices=['Planck18', 'Planck15', 'WMAP9', 'custom'],
help='Cosmology to use (default: Planck18)')
parser.add_argument('--H0', type=float,
help='Hubble constant for custom cosmology (km/s/Mpc)')
parser.add_argument('--Om0', type=float,
help='Matter density parameter for custom cosmology')
parser.add_argument('-r', '--range', nargs=3, type=float, metavar=('START', 'STOP', 'STEP'),
help='Generate redshift range (start stop step)')
parser.add_argument('-v', '--verbose', action='store_true',
help='Print verbose output with cosmology details')
parser.add_argument('--csv', action='store_true',
help='Output in CSV format')
parser.add_argument('--convert', type=float,
help='Convert a quantity to redshift')
parser.add_argument('--from', dest='from_quantity',
choices=['luminosity_distance', 'age', 'lookback_time', 'comoving_distance'],
help='Type of quantity to convert from')
args = parser.parse_args()
# Handle conversion mode
if args.convert is not None:
if args.from_quantity is None:
print("Error: Must specify --from when using --convert", file=sys.stderr)
sys.exit(1)
# Get cosmology
if args.cosmology == 'Planck18':
cosmo = Planck18
elif args.cosmology == 'Planck15':
cosmo = Planck15
elif args.cosmology == 'WMAP9':
cosmo = WMAP9
elif args.cosmology == 'custom':
if args.H0 is None or args.Om0 is None:
print("Error: Must provide --H0 and --Om0 for custom cosmology",
file=sys.stderr)
sys.exit(1)
cosmo = FlatLambdaCDM(H0=args.H0 * u.km/u.s/u.Mpc, Om0=args.Om0)
z = convert_quantity(args.convert, args.from_quantity, cosmo, to_redshift=True)
print(f"\n{args.from_quantity.replace('_', ' ').title()} = {args.convert}")
print(f"Redshift z = {z:.6f}")
print(f"(using {args.cosmology} cosmology)")
return
# Get redshifts
if args.range:
start, stop, step = args.range
redshifts = np.arange(start, stop + step/2, step)
elif args.redshifts:
redshifts = np.array(args.redshifts)
else:
print("Error: No redshifts provided.", file=sys.stderr)
parser.print_help()
sys.exit(1)
# Calculate
try:
results, cosmo = calculate_cosmology(redshifts, args.cosmology,
H0=args.H0, Om0=args.Om0)
print_results(results, verbose=args.verbose, csv=args.csv)
except Exception as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
main()

View File

@@ -1,189 +0,0 @@
#!/usr/bin/env python3
"""
Quick FITS file inspection tool.
This script provides a convenient way to inspect FITS file structure,
headers, and basic statistics without writing custom code each time.
"""
import sys
from pathlib import Path
from astropy.io import fits
import numpy as np
def print_fits_info(filename, detailed=False, ext=None):
"""
Print comprehensive information about a FITS file.
Parameters
----------
filename : str
Path to FITS file
detailed : bool
If True, print detailed statistics for each HDU
ext : int or str, optional
Specific extension to examine in detail
"""
print(f"\n{'='*70}")
print(f"FITS File: {filename}")
print(f"{'='*70}\n")
try:
with fits.open(filename) as hdul:
# Print file structure
print("File Structure:")
print("-" * 70)
hdul.info()
print()
# If specific extension requested
if ext is not None:
print(f"\nDetailed view of extension: {ext}")
print("-" * 70)
hdu = hdul[ext]
print_hdu_details(hdu, detailed=True)
return
# Print header and data info for each HDU
for i, hdu in enumerate(hdul):
print(f"\n{'='*70}")
print(f"HDU {i}: {hdu.name}")
print(f"{'='*70}")
print_hdu_details(hdu, detailed=detailed)
except FileNotFoundError:
print(f"Error: File '{filename}' not found.")
sys.exit(1)
except Exception as e:
print(f"Error reading FITS file: {e}")
sys.exit(1)
def print_hdu_details(hdu, detailed=False):
"""Print details for a single HDU."""
# Header information
print("\nHeader Information:")
print("-" * 70)
# Key header keywords
important_keywords = ['SIMPLE', 'BITPIX', 'NAXIS', 'EXTEND',
'OBJECT', 'TELESCOP', 'INSTRUME', 'OBSERVER',
'DATE-OBS', 'EXPTIME', 'FILTER', 'AIRMASS',
'RA', 'DEC', 'EQUINOX', 'CTYPE1', 'CTYPE2']
header = hdu.header
for key in important_keywords:
if key in header:
value = header[key]
comment = header.comments[key]
print(f" {key:12s} = {str(value):20s} / {comment}")
# NAXIS keywords
if 'NAXIS' in header:
naxis = header['NAXIS']
for i in range(1, naxis + 1):
key = f'NAXIS{i}'
if key in header:
print(f" {key:12s} = {str(header[key]):20s} / {header.comments[key]}")
# Data information
if hdu.data is not None:
print("\nData Information:")
print("-" * 70)
data = hdu.data
print(f" Data type: {data.dtype}")
print(f" Shape: {data.shape}")
# For image data
if hasattr(data, 'ndim') and data.ndim >= 1:
try:
# Calculate statistics
finite_data = data[np.isfinite(data)]
if len(finite_data) > 0:
print(f" Min: {np.min(finite_data):.6g}")
print(f" Max: {np.max(finite_data):.6g}")
print(f" Mean: {np.mean(finite_data):.6g}")
print(f" Median: {np.median(finite_data):.6g}")
print(f" Std: {np.std(finite_data):.6g}")
# Count special values
n_nan = np.sum(np.isnan(data))
n_inf = np.sum(np.isinf(data))
if n_nan > 0:
print(f" NaN values: {n_nan}")
if n_inf > 0:
print(f" Inf values: {n_inf}")
except Exception as e:
print(f" Could not calculate statistics: {e}")
# For table data
if hasattr(data, 'columns'):
print(f"\n Table Columns ({len(data.columns)}):")
for col in data.columns:
print(f" {col.name:20s} {col.format:10s} {col.unit or ''}")
if detailed:
print(f"\n First few rows:")
print(data[:min(5, len(data))])
else:
print("\n No data in this HDU")
# WCS information if present
try:
from astropy.wcs import WCS
wcs = WCS(hdu.header)
if wcs.has_celestial:
print("\nWCS Information:")
print("-" * 70)
print(f" Has celestial WCS: Yes")
print(f" CTYPE: {wcs.wcs.ctype}")
if wcs.wcs.crval is not None:
print(f" CRVAL: {wcs.wcs.crval}")
if wcs.wcs.crpix is not None:
print(f" CRPIX: {wcs.wcs.crpix}")
if wcs.wcs.cdelt is not None:
print(f" CDELT: {wcs.wcs.cdelt}")
except Exception:
pass
def main():
"""Main function for command-line usage."""
import argparse
parser = argparse.ArgumentParser(
description='Inspect FITS file structure and contents',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
%(prog)s image.fits
%(prog)s image.fits --detailed
%(prog)s image.fits --ext 1
%(prog)s image.fits --ext SCI
"""
)
parser.add_argument('filename', help='FITS file to inspect')
parser.add_argument('-d', '--detailed', action='store_true',
help='Show detailed statistics for each HDU')
parser.add_argument('-e', '--ext', type=str, default=None,
help='Show details for specific extension only (number or name)')
args = parser.parse_args()
# Convert extension to int if numeric
ext = args.ext
if ext is not None:
try:
ext = int(ext)
except ValueError:
pass # Keep as string for extension name
print_fits_info(args.filename, detailed=args.detailed, ext=ext)
if __name__ == '__main__':
main()